import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# Column dtypes for pd.read_csv: categoricals plus narrow numeric types keep
# the ~8.9M-row train set small enough to fit in memory.
_cat_cols = ['MachineIdentifier', 'ProductName', 'EngineVersion', 'AppVersion',
             'AvSigVersion', 'Platform', 'Processor', 'OsVer',
             'OsPlatformSubRelease', 'OsBuildLab', 'SkuEdition', 'PuaMode',
             'SmartScreen', 'Census_MDC2FormFactor', 'Census_DeviceFamily',
             'Census_ProcessorClass', 'Census_PrimaryDiskTypeName',
             'Census_ChassisTypeName', 'Census_PowerPlatformRoleName',
             'Census_InternalBatteryType', 'Census_OSVersion',
             'Census_OSArchitecture', 'Census_OSBranch', 'Census_OSEdition',
             'Census_OSSkuName', 'Census_OSInstallTypeName',
             'Census_OSWUAutoUpdateOptionsName', 'Census_GenuineStateName',
             'Census_ActivationChannel', 'Census_FlightRing']
_f16_cols = ['RtpStateBitfield', 'DefaultBrowsersIdentifier',
             'AVProductsInstalled', 'AVProductsEnabled',
             'OrganizationIdentifier', 'GeoNameIdentifier', 'IsProtected',
             'SMode', 'IeVerIdentifier', 'Firewall',
             'Census_OEMNameIdentifier', 'Census_ProcessorCoreCount',
             'Census_ProcessorManufacturerIdentifier',
             'Census_ProcessorModelIdentifier',
             'Census_InternalPrimaryDiagonalDisplaySizeInInches',
             'Census_InternalPrimaryDisplayResolutionHorizontal',
             'Census_InternalPrimaryDisplayResolutionVertical',
             'Census_OSInstallLanguageIdentifier', 'Census_IsFlightingInternal',
             'Census_IsFlightsDisabled', 'Census_ThresholdOptIn',
             'Census_FirmwareManufacturerIdentifier', 'Census_IsWIMBootEnabled',
             'Census_IsVirtualDevice',
             'Census_IsAlwaysOnAlwaysConnectedCapable', 'Wdft_IsGamer',
             'Wdft_RegionIdentifier']
_f32_cols = ['AVProductStatesIdentifier', 'CityIdentifier', 'UacLuaenable',
             'Census_OEMModelIdentifier', 'Census_PrimaryDiskTotalCapacity',
             'Census_SystemVolumeTotalCapacity', 'Census_TotalPhysicalRAM',
             'Census_InternalBatteryNumberOfCharges',
             'Census_FirmwareVersionIdentifier']
_i32_cols = ['IsBeta', 'IsSxsPassiveMode', 'HasTpm', 'CountryIdentifier',
             'LocaleEnglishNameIdentifier', 'OsBuild', 'OsSuite',
             'AutoSampleOptIn', 'Census_HasOpticalDiskDrive',
             'Census_OSBuildNumber', 'Census_OSBuildRevision',
             'Census_OSUILocaleIdentifier', 'Census_IsPortableOperatingSystem',
             'Census_IsSecureBootEnabled', 'Census_IsTouchEnabled',
             'Census_IsPenCapable', 'HasDetections']
df_dtype = {}
for _cols, _dt in ((_cat_cols, 'category'), (_f16_cols, 'float16'),
                   (_f32_cols, 'float32'), (_i32_cols, 'int32')):
    df_dtype.update(dict.fromkeys(_cols, _dt))
%%time
df=pd.read_csv("train.csv",dtype=df_dtype)#AUC of 6.75 for 5000000
Wall time: 4min 21s
# Same dtype map for the TEST set: identical to the train map except that the
# label column HasDetections is absent (test.csv has no label).
_cat_cols_t = ['MachineIdentifier', 'ProductName', 'EngineVersion',
               'AppVersion', 'AvSigVersion', 'Platform', 'Processor', 'OsVer',
               'OsPlatformSubRelease', 'OsBuildLab', 'SkuEdition', 'PuaMode',
               'SmartScreen', 'Census_MDC2FormFactor', 'Census_DeviceFamily',
               'Census_ProcessorClass', 'Census_PrimaryDiskTypeName',
               'Census_ChassisTypeName', 'Census_PowerPlatformRoleName',
               'Census_InternalBatteryType', 'Census_OSVersion',
               'Census_OSArchitecture', 'Census_OSBranch', 'Census_OSEdition',
               'Census_OSSkuName', 'Census_OSInstallTypeName',
               'Census_OSWUAutoUpdateOptionsName', 'Census_GenuineStateName',
               'Census_ActivationChannel', 'Census_FlightRing']
_f16_cols_t = ['RtpStateBitfield', 'DefaultBrowsersIdentifier',
               'AVProductsInstalled', 'AVProductsEnabled',
               'OrganizationIdentifier', 'GeoNameIdentifier', 'IsProtected',
               'SMode', 'IeVerIdentifier', 'Firewall',
               'Census_OEMNameIdentifier', 'Census_ProcessorCoreCount',
               'Census_ProcessorManufacturerIdentifier',
               'Census_ProcessorModelIdentifier',
               'Census_InternalPrimaryDiagonalDisplaySizeInInches',
               'Census_InternalPrimaryDisplayResolutionHorizontal',
               'Census_InternalPrimaryDisplayResolutionVertical',
               'Census_OSInstallLanguageIdentifier',
               'Census_IsFlightingInternal', 'Census_IsFlightsDisabled',
               'Census_ThresholdOptIn', 'Census_FirmwareManufacturerIdentifier',
               'Census_IsWIMBootEnabled', 'Census_IsVirtualDevice',
               'Census_IsAlwaysOnAlwaysConnectedCapable', 'Wdft_IsGamer',
               'Wdft_RegionIdentifier']
_f32_cols_t = ['AVProductStatesIdentifier', 'CityIdentifier', 'UacLuaenable',
               'Census_OEMModelIdentifier', 'Census_PrimaryDiskTotalCapacity',
               'Census_SystemVolumeTotalCapacity', 'Census_TotalPhysicalRAM',
               'Census_InternalBatteryNumberOfCharges',
               'Census_FirmwareVersionIdentifier']
_i32_cols_t = ['IsBeta', 'IsSxsPassiveMode', 'HasTpm', 'CountryIdentifier',
               'LocaleEnglishNameIdentifier', 'OsBuild', 'OsSuite',
               'AutoSampleOptIn', 'Census_HasOpticalDiskDrive',
               'Census_OSBuildNumber', 'Census_OSBuildRevision',
               'Census_OSUILocaleIdentifier',
               'Census_IsPortableOperatingSystem',
               'Census_IsSecureBootEnabled', 'Census_IsTouchEnabled',
               'Census_IsPenCapable']
df_dtype = {}
for _cols_t, _dt_t in ((_cat_cols_t, 'category'), (_f16_cols_t, 'float16'),
                       (_f32_cols_t, 'float32'), (_i32_cols_t, 'int32')):
    df_dtype.update(dict.fromkeys(_cols_t, _dt_t))
%%time
df_test=pd.read_csv("test.csv",dtype=df_dtype)#AUC of 6.46 for 5000000
Wall time: 4min 16s
# Submission skeleton: one row per test-set machine, keyed by MachineIdentifier.
solution = df_test[['MachineIdentifier']].copy()
df.shape
(8921483, 83)
df.memory_usage(deep=True).sum()
2861438459
df.head()
| MachineIdentifier | ProductName | EngineVersion | AppVersion | AvSigVersion | IsBeta | RtpStateBitfield | IsSxsPassiveMode | DefaultBrowsersIdentifier | AVProductStatesIdentifier | ... | Census_FirmwareVersionIdentifier | Census_IsSecureBootEnabled | Census_IsWIMBootEnabled | Census_IsVirtualDevice | Census_IsTouchEnabled | Census_IsPenCapable | Census_IsAlwaysOnAlwaysConnectedCapable | Wdft_IsGamer | Wdft_RegionIdentifier | HasDetections | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0000028988387b115f69f31a3bf04f09 | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1735.0 | 0 | 7.0 | 0 | NaN | 53447.0 | ... | 36144.0 | 0 | NaN | 0.0 | 0 | 0 | 0.0 | 0.0 | 10.0 | 0 |
| 1 | 000007535c3f730efa9ea0b7ef1bd645 | win8defender | 1.1.14600.4 | 4.13.17134.1 | 1.263.48.0 | 0 | 7.0 | 0 | NaN | 53447.0 | ... | 57858.0 | 0 | NaN | 0.0 | 0 | 0 | 0.0 | 0.0 | 8.0 | 0 |
| 2 | 000007905a28d863f6d0d597892cd692 | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1341.0 | 0 | 7.0 | 0 | NaN | 53447.0 | ... | 52682.0 | 0 | NaN | 0.0 | 0 | 0 | 0.0 | 0.0 | 3.0 | 0 |
| 3 | 00000b11598a75ea8ba1beea8459149f | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1527.0 | 0 | 7.0 | 0 | NaN | 53447.0 | ... | 20050.0 | 0 | NaN | 0.0 | 0 | 0 | 0.0 | 0.0 | 3.0 | 1 |
| 4 | 000014a5f00daa18e76b81417eeb99fc | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1379.0 | 0 | 7.0 | 0 | NaN | 53447.0 | ... | 19844.0 | 0 | 0.0 | 0.0 | 0 | 0 | 0.0 | 0.0 | 1.0 | 1 |
5 rows × 83 columns
df.describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| IsBeta | 8921483.0 | 7.509962e-06 | 2.740421e-03 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| RtpStateBitfield | 8889165.0 | NaN | 0.000000e+00 | 0.000000 | 7.000000 | 7.0 | 7.000000e+00 | 3.500000e+01 |
| IsSxsPassiveMode | 8921483.0 | 1.733378e-02 | 1.305118e-01 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| DefaultBrowsersIdentifier | 433438.0 | NaN | NaN | 1.000000 | 788.000000 | 1632.0 | 2.372000e+03 | 3.212000e+03 |
| AVProductStatesIdentifier | 8885262.0 | 4.948320e+04 | 1.379994e+04 | 3.000000 | 49480.000000 | 53447.0 | 5.344700e+04 | 7.050700e+04 |
| AVProductsInstalled | 8885262.0 | NaN | 0.000000e+00 | 0.000000 | 1.000000 | 1.0 | 2.000000e+00 | 7.000000e+00 |
| AVProductsEnabled | 8885262.0 | NaN | 0.000000e+00 | 0.000000 | 1.000000 | 1.0 | 1.000000e+00 | 5.000000e+00 |
| HasTpm | 8921483.0 | 9.879711e-01 | 1.090149e-01 | 0.000000 | 1.000000 | 1.0 | 1.000000e+00 | 1.000000e+00 |
| CountryIdentifier | 8921483.0 | 1.080490e+02 | 6.304706e+01 | 1.000000 | 51.000000 | 97.0 | 1.620000e+02 | 2.220000e+02 |
| CityIdentifier | 8596074.0 | 8.049152e+04 | 4.873461e+04 | 5.000000 | 36825.000000 | 82373.0 | 1.237000e+05 | 1.679620e+05 |
| OrganizationIdentifier | 6169965.0 | NaN | 0.000000e+00 | 1.000000 | 18.000000 | 27.0 | 2.700000e+01 | 5.200000e+01 |
| GeoNameIdentifier | 8921270.0 | NaN | NaN | 1.000000 | 89.000000 | 181.0 | 2.670000e+02 | 2.960000e+02 |
| LocaleEnglishNameIdentifier | 8921483.0 | 1.228161e+02 | 6.932125e+01 | 1.000000 | 74.000000 | 88.0 | 1.820000e+02 | 2.830000e+02 |
| OsBuild | 8921483.0 | 1.571997e+04 | 2.190685e+03 | 7600.000000 | 15063.000000 | 16299.0 | 1.713400e+04 | 1.824400e+04 |
| OsSuite | 8921483.0 | 5.751534e+02 | 2.480847e+02 | 16.000000 | 256.000000 | 768.0 | 7.680000e+02 | 7.840000e+02 |
| IsProtected | 8885439.0 | NaN | 0.000000e+00 | 0.000000 | 1.000000 | 1.0 | 1.000000e+00 | 1.000000e+00 |
| AutoSampleOptIn | 8921483.0 | 2.891896e-05 | 5.377558e-03 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| SMode | 8383724.0 | 4.649162e-04 | 2.104187e-02 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| IeVerIdentifier | 8862589.0 | NaN | NaN | 1.000000 | 111.000000 | 117.0 | 1.370000e+02 | 4.290000e+02 |
| Firewall | 8830133.0 | NaN | 0.000000e+00 | 0.000000 | 1.000000 | 1.0 | 1.000000e+00 | 1.000000e+00 |
| UacLuaenable | 8910645.0 | 1.216142e+01 | 9.867765e+03 | 0.000000 | 1.000000 | 1.0 | 1.000000e+00 | 1.677722e+07 |
| Census_OEMNameIdentifier | 8826005.0 | NaN | NaN | 1.000000 | 1443.000000 | 2102.0 | 2.668000e+03 | 6.144000e+03 |
| Census_OEMModelIdentifier | 8819250.0 | 2.378578e+05 | 7.148446e+04 | 1.000000 | 189692.000000 | 247458.0 | 3.044180e+05 | 3.454980e+05 |
| Census_ProcessorCoreCount | 8880177.0 | NaN | 0.000000e+00 | 1.000000 | 2.000000 | 4.0 | 4.000000e+00 | 1.920000e+02 |
| Census_ProcessorManufacturerIdentifier | 8880170.0 | NaN | 0.000000e+00 | 1.000000 | 5.000000 | 5.0 | 5.000000e+00 | 1.000000e+01 |
| Census_ProcessorModelIdentifier | 8880140.0 | NaN | NaN | 2.000000 | 1998.000000 | 2500.0 | 2.874000e+03 | 4.480000e+03 |
| Census_PrimaryDiskTotalCapacity | 8868467.0 | 2.912138e+06 | 4.451633e+09 | 0.000000 | 239372.000000 | 476940.0 | 9.538690e+05 | 8.160437e+12 |
| Census_SystemVolumeTotalCapacity | 8868481.0 | 3.823069e+05 | 3.233614e+05 | 0.000000 | 120775.000000 | 249500.0 | 4.759730e+05 | 4.768710e+07 |
| Census_HasOpticalDiskDrive | 8921483.0 | 7.718728e-02 | 2.668884e-01 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_TotalPhysicalRAM | 8840950.0 | 6.109390e+03 | 4.971148e+03 | 255.000000 | 4096.000000 | 4096.0 | 8.192000e+03 | 1.572864e+06 |
| Census_InternalPrimaryDiagonalDisplaySizeInInches | 8874349.0 | NaN | 0.000000e+00 | 0.700195 | 13.898438 | 15.5 | 1.720312e+01 | 1.822500e+02 |
| Census_InternalPrimaryDisplayResolutionHorizontal | 8874497.0 | NaN | NaN | -1.000000 | 1366.000000 | 1366.0 | 1.920000e+03 | 1.228800e+04 |
| Census_InternalPrimaryDisplayResolutionVertical | 8874497.0 | NaN | NaN | -1.000000 | 768.000000 | 768.0 | 1.080000e+03 | 8.640000e+03 |
| Census_InternalBatteryNumberOfCharges | 8652728.0 | 1.123782e+09 | 1.933305e+09 | 0.000000 | 0.000000 | 0.0 | 4.294967e+09 | 4.294967e+09 |
| Census_OSBuildNumber | 8921483.0 | 1.583483e+04 | 1.961743e+03 | 7600.000000 | 15063.000000 | 16299.0 | 1.713400e+04 | 1.824400e+04 |
| Census_OSBuildRevision | 8921483.0 | 9.730490e+02 | 2.931971e+03 | 0.000000 | 167.000000 | 285.0 | 5.470000e+02 | 4.173600e+04 |
| Census_OSInstallLanguageIdentifier | 8861399.0 | NaN | 0.000000e+00 | 1.000000 | 8.000000 | 9.0 | 2.000000e+01 | 3.900000e+01 |
| Census_OSUILocaleIdentifier | 8921483.0 | 6.046534e+01 | 4.499992e+01 | 1.000000 | 31.000000 | 34.0 | 9.000000e+01 | 1.620000e+02 |
| Census_IsPortableOperatingSystem | 8921483.0 | 5.452008e-04 | 2.334317e-02 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_IsFlightingInternal | 1512724.0 | 1.388788e-05 | 3.726959e-03 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_IsFlightsDisabled | 8760960.0 | 1.007318e-05 | 3.173828e-03 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_ThresholdOptIn | 3254158.0 | 2.508163e-04 | 1.582336e-02 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_FirmwareManufacturerIdentifier | 8738226.0 | NaN | NaN | 2.000000 | 142.000000 | 500.0 | 5.560000e+02 | 1.092000e+03 |
| Census_FirmwareVersionIdentifier | 8761350.0 | 3.268055e+04 | 2.112612e+04 | 3.000000 | 13156.000000 | 33070.0 | 5.243600e+04 | 7.210500e+04 |
| Census_IsSecureBootEnabled | 8921483.0 | 4.860229e-01 | 4.998046e-01 | 0.000000 | 0.000000 | 0.0 | 1.000000e+00 | 1.000000e+00 |
| Census_IsWIMBootEnabled | 3261780.0 | 2.980232e-07 | 5.459785e-04 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_IsVirtualDevice | 8905530.0 | 7.202148e-03 | 8.453369e-02 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_IsTouchEnabled | 8921483.0 | 1.255431e-01 | 3.313338e-01 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_IsPenCapable | 8921483.0 | 3.807091e-02 | 1.913675e-01 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Census_IsAlwaysOnAlwaysConnectedCapable | 8850140.0 | NaN | 0.000000e+00 | 0.000000 | 0.000000 | 0.0 | 0.000000e+00 | 1.000000e+00 |
| Wdft_IsGamer | 8618032.0 | NaN | 0.000000e+00 | 0.000000 | 0.000000 | 0.0 | 1.000000e+00 | 1.000000e+00 |
| Wdft_RegionIdentifier | 8618032.0 | NaN | 0.000000e+00 | 1.000000 | 3.000000 | 10.0 | 1.100000e+01 | 1.500000e+01 |
| HasDetections | 8921483.0 | 4.997927e-01 | 5.000000e-01 | 0.000000 | 0.000000 | 0.0 | 1.000000e+00 | 1.000000e+00 |
# Quick shape report of the training frame after load.
print("Number of Columns : ",len(df.columns))
print("Number of Rows : ",len(df))
Number of Columns : 83 Number of Rows : 8921483
# Triage columns by their fraction of missing values:
#   >= 70% missing  -> drop the column outright,
#   <= 30% missing  -> candidate for row deletion,
#   in between      -> impute later.
l=df.columns[df.isnull().any()].tolist()
drop_list=[]
delete_rows_list=[]
treat_list=[]
for column in l:
    # Hoisted: the original recomputed df[column].isnull().sum() three times
    # per column; compute the missing percentage once.
    null_pct = df[column].isnull().sum() * 100 / len(df)
    if null_pct >= 70:  # dropping columns with more than 70% of missing values
        drop_list.append(column)
    elif null_pct <= 30:  # rows of these columns could be deleted instead
        delete_rows_list.append(column)
    else:  # treat (impute) null values in the remainder of the columns
        treat_list.append(column)
# Drop the same high-missing columns from both frames so they stay aligned.
# (Comment fixed: original said "60%" while the code uses a 70% threshold.
# Redundant axis=1 removed -- columns= already selects the axis.)
df.drop(columns=drop_list, inplace=True)
df_test.drop(columns=drop_list, inplace=True)
#df.dropna(subset=delete_rows_list,axis=0,inplace=True)
#df.dropna(subset=delete_rows_list,axis=0,inplace=True)
import statistics  # NOTE: mid-file import kept; this was a notebook cell boundary
# Inspect the most frequent SmartScreen level before imputing with it.
statistics.mode(df.SmartScreen)
'RequireAdmin'
# Impute missing SmartScreen with its modal level ('RequireAdmin' above is
# the captured notebook output of the inspection call).
df.SmartScreen.fillna(statistics.mode(df.SmartScreen),inplace=True)
#df.OrganizationIdentifier.fillna(np.nanmedian(df.OrganizationIdentifier),inplace=True)
df.shape
(8921483, 78)
# Horizontal bar chart of per-feature missing-value counts (non-zero only),
# sorted so the worst offenders appear at the top.
missing_counts = df.isnull().sum().reset_index().sort_values(by=[0], ascending=False)
nonzero = missing_counts[missing_counts[0] > 0]
fig, ax = plt.subplots(figsize=(12, 7))
sns.barplot(x=nonzero[0], y=nonzero['index'], ax=ax)
ax.set_title("Missing Values in features")
ax.set_xlabel("Number of Missing Values")
ax.set_ylabel("Features")
plt.show()
# Drop train rows whose SmartScreen value is one of three junk levels.
# NOTE(review): the three '' entries were almost certainly non-printable
# garbage strings that were lost when this notebook was exported to text --
# confirm against the raw notebook/data before rerunning.
df=df[~df.SmartScreen.isin(['','',''])]
# captured notebook output of df.shape:
df.shape
(8920731, 78)
# Normalise case in both frames, then drop remaining misspelled/garbage levels
# (these appear only a handful of times each in the train set).
df['SmartScreen']=df.SmartScreen.str.lower()
df_test['SmartScreen']=df_test.SmartScreen.str.lower()
df=df[~df.SmartScreen.isin(['promt'])]
df=df[~df.SmartScreen.isin(['00000000'])]
df=df[~df.SmartScreen.isin(['enabled'])]
df=df[df.SmartScreen!='0']
df.SmartScreen.value_counts()
requireadmin 7493205 existsnotset 1046183 off 187907 warn 135484 prompt 34534 block 22533 on 878 Name: SmartScreen, dtype: int64
df_test.SmartScreen.value_counts()
requireadmin 3413570 existsnotset 600446 off 163161 warn 125926 prompt 28889 block 21242 on 939  404  267 0 3 promprt 1 deny 1 of 1 requiredadmin 1 Name: SmartScreen, dtype: int64
# Map junk/misspelled SmartScreen levels in the test set onto the train-set
# vocabulary.  Series.replace with a dict matches WHOLE cell values; the
# original chained str.replace calls did SUBSTRING replacement, which
# corrupted 'off' into 'existsnotsetf' because 'of' is a substring of 'off'
# (that artifact is visible in the value_counts output below).
# NOTE(review): the two '' keys were presumably non-printable garbage values
# lost in the notebook export -- confirm against the raw data.
df_test['SmartScreen']=df_test.SmartScreen.replace({'':'existsnotset',
                                                    '0':'off',
                                                    'deny':'existsnotset',
                                                    'requiredadmin':'existsnotset',
                                                    'of':'existsnotset',
                                                    'promprt':'existsnotset'})
df_test.SmartScreen.value_counts()
requireadmin 3413570 existsnotset 601121 existsnotsetf 163164 warn 125926 prompt 28889 block 21242 on 939 Name: SmartScreen, dtype: int64
df.Platform.unique()
[windows10, windows7, windows8, windows2016] Categories (4, object): [windows10, windows7, windows8, windows2016]
df[df.SmartScreen.isin(['on'])]['Platform'].value_counts()
windows10 876 windows7 2 windows8 0 windows2016 0 Name: Platform, dtype: int64
df[df.Platform=='windows8']['SmartScreen'].value_counts()
requireadmin 158471 existsnotset 33345 off 1605 prompt 1074 Name: SmartScreen, dtype: int64
df[df.Platform=='windows7']['SmartScreen'].value_counts()
requireadmin 89306 existsnotset 4416 off 159 on 2 warn 1 Name: SmartScreen, dtype: int64
df[df.Platform=='windows10']['SmartScreen'].value_counts()
requireadmin 7242077 existsnotset 1008045 off 177058 warn 135483 prompt 31908 block 22533 on 876 Name: SmartScreen, dtype: int64
df[df.Platform=='windows2016']['SmartScreen'].value_counts()
off 9085 requireadmin 3351 prompt 1552 existsnotset 377 Name: SmartScreen, dtype: int64
df[df.SmartScreen=='prompt']['Platform'].value_counts()
windows10 31908 windows2016 1552 windows8 1074 windows7 0 Name: Platform, dtype: int64
df.OsBuild.nunique()
76
df_test.OsBuild.nunique()
78
df.OsBuild.value_counts().head()
17134 3915214 16299 2503359 15063 780211 14393 730782 10586 411599 Name: OsBuild, dtype: int64
df[df.OsVer.str.startswith('6.1.')]['Platform'].value_counts()
windows7 93884 windows8 0 windows2016 0 windows10 0 Name: Platform, dtype: int64
df[df.OsVer.str.startswith('6.3.')]['Platform'].value_counts()
windows8 194495 windows7 0 windows2016 0 windows10 0 Name: Platform, dtype: int64
df[df.OsVer.str.startswith('10.0.0.0')]['Platform'].value_counts()
windows10 8617439 windows2016 14365 windows8 0 windows7 0 Name: Platform, dtype: int64
# Hand-partitioned feature groups used throughout the rest of the notebook.
# Categorical: names, versions and identifier codes (cast to 'category' below).
categorical_columns=['ProductName', 'EngineVersion', 'AppVersion','AvSigVersion','RtpStateBitfield','AVProductStatesIdentifier','CountryIdentifier', 'CityIdentifier','OrganizationIdentifier', 'GeoNameIdentifier','LocaleEnglishNameIdentifier', 'Platform', 'Processor', 'OsVer','OsBuild', 'OsSuite', 'OsPlatformSubRelease', 'OsBuildLab','SkuEdition','IeVerIdentifier', 'SmartScreen','UacLuaenable','Census_MDC2FormFactor','Census_DeviceFamily', 'Census_OEMNameIdentifier','Census_OEMModelIdentifier','Census_ProcessorManufacturerIdentifier','Census_ProcessorModelIdentifier','Census_PrimaryDiskTypeName','Census_ChassisTypeName','Census_PowerPlatformRoleName','Census_OSVersion', 'Census_OSArchitecture', 'Census_OSBranch','Census_OSBuildNumber', 'Census_OSBuildRevision', 'Census_OSEdition','Census_OSSkuName', 'Census_OSInstallTypeName', 'Census_OSInstallLanguageIdentifier', 'Census_OSUILocaleIdentifier','Census_OSWUAutoUpdateOptionsName','Census_GenuineStateName', 'Census_ActivationChannel','Census_FlightRing','Census_FirmwareManufacturerIdentifier','Census_FirmwareVersionIdentifier','Wdft_RegionIdentifier']
# Numerical: continuous hardware measurements left as numeric dtypes.
numerical_columns=['Census_ProcessorCoreCount','Census_PrimaryDiskTotalCapacity','Census_SystemVolumeTotalCapacity','Census_TotalPhysicalRAM','Census_InternalPrimaryDiagonalDisplaySizeInInches','Census_InternalPrimaryDisplayResolutionHorizontal','Census_InternalPrimaryDisplayResolutionVertical','Census_InternalBatteryNumberOfCharges']
# Binary: everything that remains once the id column and the label are removed.
binary_columns=list(set(df.columns)-set(categorical_columns)-set(numerical_columns)-set(['MachineIdentifier','HasDetections']))
df[categorical_columns]=df[categorical_columns].astype('category')
# Per-column imputation table built from the TRAIN set only (it is reused on
# the test set later, which avoids test-set leakage): mode for
# categorical/object columns, median for numeric ones.
# df.columns[1:] skips MachineIdentifier.
# NOTE(review): statistics.mode is pure Python and scans each ~8.9M-row
# column in Python -- Series.mode() would be much faster, but its
# tie-breaking differs, so the original call is kept.
column_value_replacements={}
for col in df.columns[1:]:
    if col in df.select_dtypes(include=['category','object']).columns:
        column_value_replacements[col]=statistics.mode(df[df[col].notna()][col])
    else:
        column_value_replacements[col]=np.nanmedian(df[col])
# Modal CityIdentifier per CountryIdentifier: used below to impute a missing
# city with the most common city of the machine's country.
country_city={}
for val in df['CountryIdentifier'].unique():
    country_city[val]=statistics.mode(df[df.CountryIdentifier==val]['CityIdentifier'])
%%time
# Fill missing CityIdentifier with the modal city of the machine's country.
# NOTE: rebuilding the column from a plain list deliberately discards the
# 'category' dtype applied above -- a categorical fillna would reject city
# values not already in the categories.
df['CityIdentifier']=[country_city[y] if pd.isnull(x) else x for x,y in zip(df.CityIdentifier,df.CountryIdentifier)]
Wall time: 9 s
%%time
# Same country-mode city imputation for the test set, using the TRAIN-derived
# country_city table.  NOTE(review): country_city[y] raises KeyError if a test
# country never appears in train -- apparently none do; confirm if data changes.
df_test['CityIdentifier']=[country_city[y] if pd.isnull(x) else x for x,y in zip(df_test.CityIdentifier,df_test.CountryIdentifier)]
Wall time: 7.34 s
# Apply the same categorical casts to the test set.
df_test[categorical_columns]=df_test[categorical_columns].astype('category')
%%time
# Impute every remaining missing test-set value from the train-set mode/median table.
for col in df_test.columns[df_test.isnull().any()]:
    df_test[col].fillna(column_value_replacements[col],inplace=True)
Wall time: 17.1 s
%%time
# Impute remaining train-set missing values from the same replacement table.
for col in df.columns[df.isnull().any()]:
    df[col].fillna(column_value_replacements[col],inplace=True)
Wall time: 19.5 s
def reduce_memory(df):
    """Downcast numeric columns to the narrowest dtype that holds their range.

    For every integer/float column, the column's min and max are compared
    against each candidate dtype's limits and the column is cast to the
    smallest one that fits.  Prints the total bytes saved (deep memory usage
    before minus after) and returns the same, modified-in-place, DataFrame.
    """
    initial = df.memory_usage(deep=True).sum()
    print("Changing Datatypes please wait.......")
    for col in df.select_dtypes(include=['int64','int32','float64','float32','int16']).columns:
        try:
            # Hoisted: the original recomputed np.max/np.min for every
            # candidate dtype; compute the column's range once.
            col_min, col_max = np.min(df[col]), np.max(df[col])
            if df[col].dtype in ['int8','int16','int32','int64']:
                # Try narrow -> wide; fall back to int64 when nothing fits.
                for target in ('int8', 'int16', 'int32'):
                    info = np.iinfo(target)
                    if (col_max < info.max) & (col_min > info.min):
                        df[col] = df[col].astype(target)
                        break
                else:
                    df[col] = df[col].astype('int64')
            else:
                # Same narrow -> wide scan for floats (float16 loses precision
                # by design -- this function trades precision for memory).
                for target in ('float16', 'float32'):
                    info = np.finfo(target)
                    if (col_max < info.max) & (col_min > info.min):
                        df[col] = df[col].astype(target)
                        break
                else:
                    df[col] = df[col].astype('float64')
        except Exception:  # narrowed from a bare except: (which also caught KeyboardInterrupt)
            print("Exception for column ", col)
            continue
    final = df.memory_usage(deep=True).sum()
    print("Datatypes updated and memory usage is reduced by :", initial - final)
    return df
def my_plots(df, col):
    """Plot side-by-side horizontal bars of affected vs unaffected counts
    for the 10 most frequent values of `col` in `df`.

    `df` must carry a binary HasDetections label column.
    """
    # Top-10 values overall; .index avoids reset_index()['index'], whose
    # column name changed in newer pandas versions.
    top10 = df[col].value_counts().head(10).index.tolist()
    # Reindex both per-label counts on top10 so every bar lines up with its
    # y-tick label.  (The original filtered two value_counts() results that
    # were each sorted by their own counts, so bars could be matched to the
    # wrong label, and missing values produced short lists that broke barh.)
    affected = df[df.HasDetections == 1][col].value_counts().reindex(top10, fill_value=0).tolist()
    unaffected = df[df.HasDetections == 0][col].value_counts().reindex(top10, fill_value=0).tolist()
    index = np.arange(len(top10))
    width = 0.3
    plt.figure(figsize=(12, 6))
    plt.barh(index, affected, width, color='r', label='Affected')
    plt.barh(index + width, unaffected, width, color='g', label='Unaffected')
    plt.ylabel(col)
    plt.xlabel('Count')
    plt.yticks(index + width / 2, top10, rotation=0)
    plt.legend(loc='best')
    plt.show()
def year_month(df):
    """Extract build Year and Month from the OsBuildLab column.

    OsBuildLab looks like '17134.1.amd64fre.rs4_release.180410-1804': the
    fifth dot-separated field begins with a YYMMDD datestamp.  Adds integer
    'Year' and 'Month' columns to `df` (modified in place) and returns it.
    """
    df['OsBuildLab'] = df.OsBuildLab.astype('str')
    date_field = df.OsBuildLab.str.split(".", expand=True)[4].str.split("-", expand=True)[0]
    # Hoisted: parse the datestamp to int once instead of three times.
    stamp = date_field.astype('int')
    df['Year'] = stamp // 10000
    df['Month'] = (stamp % 10000) // 100
    # The day component (stamp % 100) was computed but never used, so it is
    # dropped here.  NOTE(review): the original EDA observed "days" > 31 on
    # some Nov/Dec 2017 builds (vendor datestamp quirks) -- Year/Month still
    # parse correctly for those rows.
    return df
%%time
# Derive Year/Month build-date features for the training set (in place).
df=year_month(df)
Wall time: 1min 45s
# One malformed OsBuildLab row in the test set uses '*' instead of '.' as the
# field separator; patch that single row so the split inside year_month() works.
df_test.loc[6529507,'OsBuildLab']=df_test.loc[6529507,'OsBuildLab'].replace('*','.')
#temp=df_test.OsBuildLab.str.split(".",expand=True)
#temp=temp[4].str.split("-",expand=True)
%%time
# Same Year/Month derivation for the test set.
df_test=year_month(df_test)
Wall time: 1min 47s
#df_train=df[~df.isnull().any(axis=1)]
# Distribution of build Year and Month in train (split by label) and in test.
sns.countplot(data=df,x='Year',hue='HasDetections')
plt.show()
sns.countplot(data=df,x='Year')
plt.show()
sns.countplot(data=df_test,x='Year')
plt.show()
sns.countplot(data=df,x='Month',hue='HasDetections')
plt.show()
sns.countplot(data=df,x='Month')
plt.show()
sns.countplot(data=df_test,x='Month')
plt.show()
df[(df.Month!=4)&(df.Year!=18)]['HasDetections'].value_counts()
0 1616458 1 1425774 Name: HasDetections, dtype: int64
#df[df.Year==18]['Census_InternalBatteryNumberOfCharges'].boxplot()
# Battery-charge distribution vs label for 2015 builds.
sns.boxplot(data=df[df.Year==15],y='Census_InternalBatteryNumberOfCharges',x='HasDetections')
<matplotlib.axes._subplots.AxesSubplot at 0x1ce7f2dc320>
# Month-by-month detection counts, one chart per build year.
for val in df.Year.unique():
    sns.countplot(data=df[df.Year==val],x='Month',hue='HasDetections')
    plt.title("Year "+str(val))
    plt.show()
# One-hot encode build Month/Year (int8 keeps the dummies small), then collect
# the generated dummy column names for the interaction features built below.
df=pd.get_dummies(data=df,columns=['Month','Year'],dtype='int8')
df_test=pd.get_dummies(data=df_test,columns=['Month','Year'],dtype='int8')
month_columns=df.columns[df.columns.str.startswith('Month')]
year_columns=df.columns[df.columns.str.startswith('Year')]
month_columns
Index(['Month_1', 'Month_2', 'Month_3', 'Month_4', 'Month_5', 'Month_6',
'Month_7', 'Month_8', 'Month_9', 'Month_10', 'Month_11', 'Month_12'],
dtype='object')
%%time
# One indicator column per (year, month) pair: it is 1 only for rows where
# both dummy columns fire, i.e. machines built in that exact year-month.
for yr_col in year_columns:
    for mo_col in month_columns:
        df['Interaction_MY' + str(yr_col) + str(mo_col)] = df[yr_col] * df[mo_col]
Wall time: 8.2 s
df.head()
| MachineIdentifier | ProductName | EngineVersion | AppVersion | AvSigVersion | IsBeta | RtpStateBitfield | IsSxsPassiveMode | AVProductStatesIdentifier | AVProductsInstalled | ... | Interaction_MYYear_18Month_3 | Interaction_MYYear_18Month_4 | Interaction_MYYear_18Month_5 | Interaction_MYYear_18Month_6 | Interaction_MYYear_18Month_7 | Interaction_MYYear_18Month_8 | Interaction_MYYear_18Month_9 | Interaction_MYYear_18Month_10 | Interaction_MYYear_18Month_11 | Interaction_MYYear_18Month_12 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0000028988387b115f69f31a3bf04f09 | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1735.0 | 0 | 7.0 | 0 | 53447.0 | 1.0 | ... | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| 1 | 000007535c3f730efa9ea0b7ef1bd645 | win8defender | 1.1.14600.4 | 4.13.17134.1 | 1.263.48.0 | 0 | 7.0 | 0 | 53447.0 | 1.0 | ... | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| 2 | 000007905a28d863f6d0d597892cd692 | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1341.0 | 0 | 7.0 | 0 | 53447.0 | 1.0 | ... | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| 3 | 00000b11598a75ea8ba1beea8459149f | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1527.0 | 0 | 7.0 | 0 | 53447.0 | 1.0 | ... | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
| 4 | 000014a5f00daa18e76b81417eeb99fc | win8defender | 1.1.15100.1 | 4.18.1807.18075 | 1.273.1379.0 | 0 | 7.0 | 0 | 53447.0 | 1.0 | ... | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
5 rows × 220 columns
%%time
# Same year x month interaction indicators for the test set, using the SAME
# column names as the train loop so the frames stay aligned.
# (The counter `i` from the original was initialized and incremented but
# never read -- dead code, removed.)
for year in year_columns:
    for month in month_columns:
        new_col = 'Interaction_MY' + str(year) + str(month)
        df_test[new_col] = np.multiply(df_test[year], df_test[month])
Wall time: 7.06 s
# Heuristic flag derived from SmartScreen: 'requireadmin' (the common default)
# maps to 0, any other level to 1.
# NOTE(review): presumably 0 = default/unmanaged device, 1 = explicitly
# configured -- confirm the intended semantics with the notebook author.
df['Device_PossibleOwnership'] = [int(v != 'requireadmin') for v in df['SmartScreen']]
df_test['Device_PossibleOwnership'] = [int(v != 'requireadmin') for v in df_test['SmartScreen']]
%%time
df=reduce_memory(df=df)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 365749684 Wall time: 17.4 s
%%time
df_test=reduce_memory(df=df_test)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 267010602 Wall time: 14.6 s
fig,ax=plt.subplots(figsize=(12,7))
sns.boxplot(data=df,x=categorical_columns[0],y=numerical_columns[7],hue='HasDetections',ax=ax)
<matplotlib.axes._subplots.AxesSubplot at 0x2682f7f9eb8>
fig,ax=plt.subplots(figsize=(12,7))
sns.barplot(data=df,x=categorical_columns[0],y=numerical_columns[0],ax=ax)
plt.show()
C:\Users\gandh\Anaconda3\lib\site-packages\scipy\stats\stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result. return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval
fig,ax=plt.subplots(figsize=(12,7))
sns.barplot(data=df,x='OsPlatformSubRelease',y=numerical_columns[0],ax=ax)
plt.show()
C:\Users\gandh\Anaconda3\lib\site-packages\scipy\stats\stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result. return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval
fig,ax=plt.subplots(figsize=(12,7))
sns.barplot(data=df,x='Census_DeviceFamily',y=numerical_columns[0],ax=ax)
plt.show()
C:\Users\gandh\Anaconda3\lib\site-packages\scipy\stats\stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result. return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval
fig,ax=plt.subplots(figsize=(12,7))
sns.violinplot(data=df,x=categorical_columns[0],y=numerical_columns[0],hue='HasDetections')
C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\_methods.py:107: RuntimeWarning: overflow encountered in reduce arrmean = umr_sum(arr, axis, dtype, keepdims=True) C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\function_base.py:133: RuntimeWarning: invalid value encountered in multiply y *= step C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\function_base.py:142: RuntimeWarning: invalid value encountered in add y += start C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\_methods.py:28: RuntimeWarning: invalid value encountered in reduce return umr_maximum(a, axis, None, out, keepdims, initial) C:\Users\gandh\Anaconda3\lib\site-packages\scipy\stats\stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result. return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval
<matplotlib.axes._subplots.AxesSubplot at 0x1f8f62342b0>
# Violin plot of the fifth numerical column across the first categorical
# column, split by the HasDetections target.
# FIX: pass the created axes to sns.violinplot (the original relied on it
# implicitly being the current axes), matching the other cells.
fig, ax = plt.subplots(figsize=(12, 7))
sns.violinplot(data=df, x=categorical_columns[0], y=numerical_columns[4], hue='HasDetections', ax=ax)
C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\_methods.py:107: RuntimeWarning: overflow encountered in reduce arrmean = umr_sum(arr, axis, dtype, keepdims=True) C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\function_base.py:133: RuntimeWarning: invalid value encountered in multiply y *= step C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\function_base.py:142: RuntimeWarning: invalid value encountered in add y += start C:\Users\gandh\Anaconda3\lib\site-packages\numpy\core\_methods.py:28: RuntimeWarning: invalid value encountered in reduce return umr_maximum(a, axis, None, out, keepdims, initial) C:\Users\gandh\Anaconda3\lib\site-packages\scipy\stats\stats.py:1713: FutureWarning: Using a non-tuple sequence for multidimensional indexing is deprecated; use `arr[tuple(seq)]` instead of `arr[seq]`. In the future this will be interpreted as an array index, `arr[np.array(seq)]`, which will result either in an error or a different result. return np.add.reduce(sorted[indexer] * weights, axis=axis) / sumval
<matplotlib.axes._subplots.AxesSubplot at 0x1f932940ac8>
df[numerical_columns].describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| Census_ProcessorCoreCount | 8920724.0 | NaN | 0.000000e+00 | 1.000000 | 2.000000 | 4.0 | 4.000000e+00 | 1.920000e+02 |
| Census_PrimaryDiskTotalCapacity | 8920724.0 | 2.895155e+06 | 4.438576e+09 | 0.000000 | 244197.000000 | 476940.0 | 9.538690e+05 | 8.160437e+12 |
| Census_SystemVolumeTotalCapacity | 8920724.0 | 3.816120e+05 | 3.230770e+05 | 0.000000 | 120827.000000 | 249500.0 | 4.759650e+05 | 4.768710e+07 |
| Census_TotalPhysicalRAM | 8920724.0 | 6.091263e+03 | 4.951270e+03 | 255.000000 | 4096.000000 | 4096.0 | 8.192000e+03 | 1.572864e+06 |
| Census_InternalPrimaryDiagonalDisplaySizeInInches | 8920724.0 | NaN | 0.000000e+00 | 0.700195 | 13.898438 | 15.5 | 1.720312e+01 | 1.822500e+02 |
| Census_InternalPrimaryDisplayResolutionHorizontal | 8920724.0 | NaN | NaN | -1.000000 | 1366.000000 | 1366.0 | 1.920000e+03 | 1.228800e+04 |
| Census_InternalPrimaryDisplayResolutionVertical | 8920724.0 | NaN | NaN | -1.000000 | 768.000000 | 768.0 | 1.080000e+03 | 8.640000e+03 |
| Census_InternalBatteryNumberOfCharges | 8920724.0 | 1.089876e+09 | 1.925254e+09 | 0.000000 | 0.000000 | 0.0 | 4.294967e+09 | 4.294967e+09 |
np.min(df.Census_TotalPhysicalRAM)
255.0
df[df.Census_TotalPhysicalRAM>409600].shape
(34, 219)
df[df.Census_TotalPhysicalRAM>509600][['OsPlatformSubRelease','AVProductStatesIdentifier','RtpStateBitfield','Census_DeviceFamily','Wdft_IsGamer','AVProductsEnabled','HasDetections']]
| OsPlatformSubRelease | AVProductStatesIdentifier | RtpStateBitfield | Census_DeviceFamily | Wdft_IsGamer | AVProductsEnabled | HasDetections | |
|---|---|---|---|---|---|---|---|
| 77402 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 282900 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 359909 | rs1 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 423049 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 843490 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 1 |
| 844448 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 912233 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 942736 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 1 |
| 1119826 | rs4 | 35379.0 | 0.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 1827338 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 1 |
| 1866798 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 1882995 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 1905786 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 2382892 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 2404851 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 3168236 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 1 |
| 3283213 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 1 |
| 3497061 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 3528025 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 3912495 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 3991746 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 1 |
| 4222767 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 4683372 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 4778357 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 4904555 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 1.0 | 1.0 | 1 |
| 4978744 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 5684683 | rs3 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 6164464 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 6643797 | rs1 | 53447.0 | 7.0 | Windows.Server | 0.0 | 1.0 | 0 |
| 7898158 | rs4 | 30961.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 7957693 | rs3 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 0 |
| 8564928 | rs4 | 53447.0 | 7.0 | Windows.Desktop | 0.0 | 1.0 | 1 |
df[categorical_columns].describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| CityIdentifier | 8920724.0 | 80639.789062 | 48838.632812 | 5.0 | 36825.0 | 82373.0 | 124736.0 | 167962.0 |
df_test.AvSigVersion.nunique()
9357
df.AvSigVersion.nunique()
8531
len(set(df_test.AvSigVersion.unique())-set(df.AvSigVersion.unique()))
1092
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['HasDetections'],hue=df['OsPlatformSubRelease'],ax=ax)
<matplotlib.axes._subplots.AxesSubplot at 0x1cf08da03c8>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['OsPlatformSubRelease'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x24069fd35f8>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['Platform'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x23fb17eee10>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['Census_ProcessorManufacturerIdentifier'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x240c3c657f0>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['RtpStateBitfield'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x24105bcceb8>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['IsBeta'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x2405aabe7b8>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['AVProductsInstalled'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x240ff48e048>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['AVProductsEnabled'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x240a73d8c18>
fig,ax=plt.subplots(figsize=(12,7))
sns.countplot(x=df['SkuEdition'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x24126963518>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df['SmartScreen'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x23f0e29ecf8>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df['Census_MDC2FormFactor'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x2406ee1cdd8>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df['Wdft_IsGamer'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x2413092aef0>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df['AVProductsInstalled'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x240d22a7518>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df[df.Wdft_IsGamer==1]['AVProductsInstalled'],hue=df[df.Wdft_IsGamer==1]['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x23ef9ffd0f0>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df[df.Wdft_IsGamer==1]['AVProductsEnabled'],hue=df[df.Wdft_IsGamer==1]['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x2405cfbf160>
fig, ax = plt.subplots(figsize=(20, 7))
# Hoist the shared EngineVersion mask: the original rebuilt the full filtered
# frame twice (once for x, once for hue) — an expensive double scan on ~9M rows.
recent_engine = df.EngineVersion.isin(['1.1.15200.1', '1.1.15100.1'])
sns.countplot(x=df.loc[recent_engine, 'AVProductsEnabled'],
              hue=df.loc[recent_engine, 'HasDetections'],
              ax=ax, palette={0: 'g', 1: 'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x2412d318a58>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df[df.EngineVersion.isin(['1.1.15200.1','1.1.15100.1'])]['Wdft_IsGamer'],hue=df[df.EngineVersion.isin(['1.1.15200.1','1.1.15100.1'])]['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x2409768ea90>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df[df.AppVersion=='4.18.1807.18075']['Wdft_IsGamer'],hue=df[df.AppVersion=='4.18.1807.18075']['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x23ef26f2278>
fig,ax=plt.subplots(figsize=(20,7))
sns.countplot(x=df['Wdft_IsGamer'],hue=df['HasDetections'],ax=ax,palette={0:'g',1:'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x24093c82cc0>
fig, ax = plt.subplots(figsize=(20, 7))
# Hoist the shared row mask: the original materialized the filtered frame
# twice via chained indexing df[mask][col]; .loc is one pass and unambiguous.
single_av_gamers = (df.AVProductsInstalled == 1) & (df.Wdft_IsGamer == 1)
sns.countplot(x=df.loc[single_av_gamers, 'ProductName'],
              hue=df.loc[single_av_gamers, 'HasDetections'],
              ax=ax, palette={0: 'g', 1: 'r'})
<matplotlib.axes._subplots.AxesSubplot at 0x1fbecdc0390>
#df['AVProductsInstalled_1']=[1 if x==1 else 0 for x in df['AVProductsInstalled']]
#df['Interaction_17']=np.logical_and(np.logical_and(df.AVProductsInstalled_1,df.Wdft_IsGamer),df.ProductName_win8defender.astype('bool')).astype('int8')
# Target-split plots for OsBuild and each categorical column.
# The original repeated my_plots() 47 times by hand; a loop is equivalent.
# NOTE: index 20 was skipped in the original sweep — preserved here.
my_plots(df,'OsBuild')
for _col_idx in range(1, 48):
    if _col_idx == 20:
        continue
    my_plots(df, categorical_columns[_col_idx])
import category_encoders as ce
from tqdm import tqdm
import warnings
import gc
import time
from sklearn.metrics import mean_squared_error
warnings.simplefilter(action='ignore', category=FutureWarning)
def frequency_encoding(df, df_test, variable):
    """Build a frequency-rank encoding for `variable` over train+test combined.

    Categories are ranked by descending frequency (0 = most frequent).
    All categories that occur exactly once collapse into a single shared
    "rare" label equal to (highest non-rare rank + 1).

    Returns a dict mapping category value -> encoded (float) label.

    Rewritten from the original double `reset_index()` version, which relied
    on legacy `value_counts().reset_index()` column names ('index'/'level_0')
    and applied `fillna` to the whole intermediate frame; this form computes
    the identical mapping without depending on those internals.
    """
    counts = pd.concat([df[variable], df_test[variable]]).value_counts()
    # value_counts sorts by frequency descending, so position == rank.
    ranks = pd.Series(np.arange(len(counts), dtype=float), index=counts.index)
    # Collapse singleton categories into one shared bucket.
    ranks[counts == 1] = np.nan
    rare_label = ranks.max() + 1
    return ranks.fillna(rare_label).to_dict()
%%time
for variable in tqdm(categorical_columns):
freq_enc_dict = frequency_encoding(df,df_test,variable)
df[variable] = df[variable].map(lambda x: freq_enc_dict.get(x, np.nan))
df_test[variable] = df_test[variable].map(lambda x: freq_enc_dict.get(x, np.nan))
categorical_columns.remove(variable)
50%|█████████████████████████████████████████ | 24/48 [00:43<00:57, 2.38s/it]
Wall time: 43.9 s
%%time
# Label-encode the remaining categorical columns.
# First pass fits pd.factorize on the TRAIN column only; second pass maps
# both train and test through the fitted index (get_indexer returns -1 for
# categories never seen in train). MachineIdentifier is the row key, skipped.
indexer = {}
for col in tqdm(categorical_columns):
    if col == 'MachineIdentifier': continue
    _, indexer[col] = pd.factorize(df[col])
for col in tqdm(categorical_columns):
    if col == 'MachineIdentifier': continue
    df[col] = indexer[col].get_indexer(df[col])
    df_test[col] = indexer[col].get_indexer(df_test[col])
100%|██████████████████████████████████████████████████████████████████████████████████| 24/24 [00:09<00:00, 3.53it/s] 100%|██████████████████████████████████████████████████████████████████████████████████| 24/24 [36:39<00:00, 67.87s/it]
Wall time: 36min 48s
%%time
for col in df.columns[1:]:
if df[col].nunique()==2:
print(df[col].value_counts())
print("\n")
0 8920657 1 67 Name: IsBeta, dtype: int64 0 8766103 1 154621 Name: IsSxsPassiveMode, dtype: int64 1 8813414 0 107310 Name: HasTpm, dtype: int64 1.0 8437593 0.0 483131 Name: IsProtected, dtype: int64 0 8920466 1 258 Name: AutoSampleOptIn, dtype: int64 0.0 8916843 1.0 3881 Name: SMode, dtype: int64 1.0 8731622 0.0 189102 Name: Firewall, dtype: int64 0 8232180 1 688544 Name: Census_HasOpticalDiskDrive, dtype: int64 0 8915860 1 4864 Name: Census_IsPortableOperatingSystem, dtype: int64 0.0 8920637 1.0 87 Name: Census_IsFlightsDisabled, dtype: int64 0.0 8919908 1.0 816 Name: Census_ThresholdOptIn, dtype: int64 0 4584979 1 4335745 Name: Census_IsSecureBootEnabled, dtype: int64 0.0 8920723 1.0 1 Name: Census_IsWIMBootEnabled, dtype: int64 0.0 8858046 1.0 62678 Name: Census_IsVirtualDevice, dtype: int64 0 7800821 1 1119903 Name: Census_IsTouchEnabled, dtype: int64 0 8581166 1 339558 Name: Census_IsPenCapable, dtype: int64 0.0 8412643 1.0 508081 Name: Census_IsAlwaysOnAlwaysConnectedCapable, dtype: int64 0.0 6477019 1.0 2443705 Name: Wdft_IsGamer, dtype: int64 0 4462256 1 4458468 Name: HasDetections, dtype: int64 0 8899401 1 21323 Name: Month_1, dtype: int64 0 8872556 1 48168 Name: Month_2, dtype: int64 0 7826917 1 1093807 Name: Month_3, dtype: int64 0 4897093 1 4023631 Name: Month_4, dtype: int64 0 7629648 1 1291076 Name: Month_5, dtype: int64 0 8580966 1 339758 Name: Month_6, dtype: int64 0 8697782 1 222942 Name: Month_7, dtype: int64 0 8766232 1 154492 Name: Month_8, dtype: int64 0 7441694 1 1479030 Name: Month_9, dtype: int64 0 8821495 1 99229 Name: Month_10, dtype: int64 0 8851301 1 69423 Name: Month_11, dtype: int64 0 8842879 1 77845 Name: Month_12, dtype: int64 0 8920567 1 157 Name: Year_9, dtype: int64 0 8920543 1 181 Name: Year_10, dtype: int64 0 8920430 1 294 Name: Year_11, dtype: int64 0 8920684 1 40 Name: Year_12, dtype: int64 0 8920458 1 266 Name: Year_13, dtype: int64 0 8919032 1 1692 Name: Year_14, dtype: int64 0 8829890 1 90834 Name: Year_15, dtype: int64 0 
8599707 1 321017 Name: Year_16, dtype: int64 0 6249001 1 2671723 Name: Year_17, dtype: int64 1 5834520 0 3086204 Name: Year_18, dtype: int64 0 8920569 1 155 Name: Interaction_MYYear_9Month_7, dtype: int64 0 8920722 1 2 Name: Interaction_MYYear_9Month_12, dtype: int64 0 8920712 1 12 Name: Interaction_MYYear_10Month_2, dtype: int64 0 8920714 1 10 Name: Interaction_MYYear_10Month_6, dtype: int64 0 8920723 1 1 Name: Interaction_MYYear_10Month_9, dtype: int64 0 8920716 1 8 Name: Interaction_MYYear_10Month_10, dtype: int64 0 8920574 1 150 Name: Interaction_MYYear_10Month_11, dtype: int64 0 8920477 1 247 Name: Interaction_MYYear_11Month_4, dtype: int64 0 8920688 1 36 Name: Interaction_MYYear_11Month_6, dtype: int64 0 8920721 1 3 Name: Interaction_MYYear_11Month_10, dtype: int64 0 8920716 1 8 Name: Interaction_MYYear_11Month_11, dtype: int64 0 8920703 1 21 Name: Interaction_MYYear_12Month_3, dtype: int64 0 8920720 1 4 Name: Interaction_MYYear_12Month_4, dtype: int64 0 8920720 1 4 Name: Interaction_MYYear_12Month_5, dtype: int64 0 8920713 1 11 Name: Interaction_MYYear_12Month_8, dtype: int64 0 8920719 1 5 Name: Interaction_MYYear_13Month_1, dtype: int64 0 8920671 1 53 Name: Interaction_MYYear_13Month_3, dtype: int64 0 8920721 1 3 Name: Interaction_MYYear_13Month_5, dtype: int64 0 8920720 1 4 Name: Interaction_MYYear_13Month_7, dtype: int64 0 8920539 1 185 Name: Interaction_MYYear_13Month_8, dtype: int64 0 8920723 1 1 Name: Interaction_MYYear_13Month_9, dtype: int64 0 8920709 1 15 Name: Interaction_MYYear_13Month_10, dtype: int64 0 8920704 1 20 Name: Interaction_MYYear_14Month_2, dtype: int64 0 8920075 1 649 Name: Interaction_MYYear_14Month_3, dtype: int64 0 8920381 1 343 Name: Interaction_MYYear_14Month_7, dtype: int64 0 8920615 1 109 Name: Interaction_MYYear_14Month_8, dtype: int64 0 8920179 1 545 Name: Interaction_MYYear_14Month_10, dtype: int64 0 8920698 1 26 Name: Interaction_MYYear_14Month_12, dtype: int64 0 8920509 1 215 Name: Interaction_MYYear_15Month_1, dtype: 
int64 0 8920691 1 33 Name: Interaction_MYYear_15Month_2, dtype: int64 0 8920168 1 556 Name: Interaction_MYYear_15Month_3, dtype: int64 0 8920688 1 36 Name: Interaction_MYYear_15Month_4, dtype: int64 0 8920573 1 151 Name: Interaction_MYYear_15Month_5, dtype: int64 0 8885886 1 34838 Name: Interaction_MYYear_15Month_7, dtype: int64 0 8914201 1 6523 Name: Interaction_MYYear_15Month_8, dtype: int64 0 8919121 1 1603 Name: Interaction_MYYear_15Month_9, dtype: int64 0 8888974 1 31750 Name: Interaction_MYYear_15Month_10, dtype: int64 0 8905750 1 14974 Name: Interaction_MYYear_15Month_11, dtype: int64 0 8920569 1 155 Name: Interaction_MYYear_15Month_12, dtype: int64 0 8906060 1 14664 Name: Interaction_MYYear_16Month_1, dtype: int64 0 8895566 1 25158 Name: Interaction_MYYear_16Month_2, dtype: int64 0 8912207 1 8517 Name: Interaction_MYYear_16Month_3, dtype: int64 0 8906477 1 14247 Name: Interaction_MYYear_16Month_4, dtype: int64 0 8907956 1 12768 Name: Interaction_MYYear_16Month_5, dtype: int64 0 8905557 1 15167 Name: Interaction_MYYear_16Month_6, dtype: int64 0 8857429 1 63295 Name: Interaction_MYYear_16Month_7, dtype: int64 0 8902084 1 18640 Name: Interaction_MYYear_16Month_8, dtype: int64 0 8898163 1 22561 Name: Interaction_MYYear_16Month_9, dtype: int64 0 8870372 1 50352 Name: Interaction_MYYear_16Month_10, dtype: int64 0 8895744 1 24980 Name: Interaction_MYYear_16Month_11, dtype: int64 0 8870056 1 50668 Name: Interaction_MYYear_16Month_12, dtype: int64 0 8919999 1 725 Name: Interaction_MYYear_17Month_1, dtype: int64 0 8919326 1 1398 Name: Interaction_MYYear_17Month_2, dtype: int64 0 8096779 1 823945 Name: Interaction_MYYear_17Month_3, dtype: int64 0 8891286 1 29438 Name: Interaction_MYYear_17Month_4, dtype: int64 0 8919438 1 1286 Name: Interaction_MYYear_17Month_5, dtype: int64 0 8690223 1 230501 Name: Interaction_MYYear_17Month_6, dtype: int64 0 8860746 1 59978 Name: Interaction_MYYear_17Month_7, dtype: int64 0 8919186 1 1538 Name: Interaction_MYYear_17Month_8, dtype: 
int64 0 7470671 1 1450053 Name: Interaction_MYYear_17Month_9, dtype: int64 0 8904168 1 16556 Name: Interaction_MYYear_17Month_10, dtype: int64 0 8891413 1 29311 Name: Interaction_MYYear_17Month_11, dtype: int64 0 8893730 1 26994 Name: Interaction_MYYear_17Month_12, dtype: int64 0 8915010 1 5714 Name: Interaction_MYYear_18Month_1, dtype: int64 0 8899177 1 21547 Name: Interaction_MYYear_18Month_2, dtype: int64 0 8660658 1 260066 Name: Interaction_MYYear_18Month_3, dtype: int64 0 4941065 1 3979659 Name: Interaction_MYYear_18Month_4, dtype: int64 0 7643860 1 1276864 Name: Interaction_MYYear_18Month_5, dtype: int64 0 8826680 1 94044 Name: Interaction_MYYear_18Month_6, dtype: int64 0 8856395 1 64329 Name: Interaction_MYYear_18Month_7, dtype: int64 0 8793238 1 127486 Name: Interaction_MYYear_18Month_8, dtype: int64 0 8915913 1 4811 Name: Interaction_MYYear_18Month_9, dtype: int64 0 7493205 1 1427519 Name: Device_PossibleOwnership, dtype: int64 Wall time: 41.7 s
# Drop near-constant year/month interaction dummies (almost all zeros, no
# signal). The same column list is dropped from train and test; name it once.
rare_dummy_cols = ['Interaction_MYYear_15Month_12','Interaction_MYYear_15Month_5','Interaction_MYYear_15Month_4','Interaction_MYYear_15Month_2','Interaction_MYYear_14Month_12','Interaction_MYYear_14Month_8','Interaction_MYYear_14Month_2','Interaction_MYYear_13Month_10','Interaction_MYYear_13Month_9','Interaction_MYYear_13Month_8','Interaction_MYYear_13Month_7','Interaction_MYYear_13Month_5','Interaction_MYYear_13Month_3','Interaction_MYYear_13Month_1','Interaction_MYYear_12Month_8','Interaction_MYYear_12Month_5','Interaction_MYYear_12Month_4','Interaction_MYYear_12Month_3','Interaction_MYYear_11Month_11','Interaction_MYYear_11Month_10','Interaction_MYYear_11Month_6','Interaction_MYYear_11Month_4','Interaction_MYYear_10Month_11','Interaction_MYYear_10Month_10','Interaction_MYYear_10Month_9','Interaction_MYYear_10Month_6','Year_9','Year_10','Year_11','Year_12','Year_13','Interaction_MYYear_9Month_7','Interaction_MYYear_9Month_12','Interaction_MYYear_10Month_2']
df.drop(columns=rare_dummy_cols, inplace=True)
df_test.drop(columns=rare_dummy_cols, inplace=True)
df.shape
(8920724, 187)
df_test.shape
(7853253, 186)
%%time
#df.drop(columns=list(df.columns[df.columns.str.endswith('encode')]),inplace=True)
#df_test.drop(columns=list(df_test.columns[df_test.columns.str.endswith('encode')]),inplace=True)
Wall time: 0 ns
%%time
df=reduce_memory(df)
df_test=reduce_memory(df_test)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 1793065524 Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 1625623371 Wall time: 53.1 s
df.info()
<class 'pandas.core.frame.DataFrame'> Int64Index: 8920724 entries, 0 to 8921482 Columns: 187 entries, MachineIdentifier to Device_PossibleOwnership dtypes: category(17), float16(23), float32(4), int16(9), int32(2), int8(132) memory usage: 2.5 GB
df_test.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 7853253 entries, 0 to 7853252 Columns: 186 entries, MachineIdentifier to Device_PossibleOwnership dtypes: category(16), float16(24), float32(4), int16(9), int32(2), int8(131) memory usage: 2.1 GB
%%time
s=df['AVProductsInstalled']-df['AVProductsEnabled']
r=df_test['AVProductsInstalled']-df_test['AVProductsEnabled']
df['AV_highrisk']=[1 if (x==0)&(y==0) else 0 for x,y in zip(df.AVProductsInstalled,s)]
df_test['AV_highrisk']=[1 if (x==0)&(y==0) else 0 for x,y in zip(df_test.AVProductsInstalled,r)]
df['AV_mediumrisk']=[1 if (x==1)&(y==0) else 0 for x,y in zip(df.AVProductsInstalled,s)]
df_test['AV_mediumrisk']=[1 if (x==1)&(y==0) else 0 for x,y in zip(df_test.AVProductsInstalled,r)]
df['AV_lowrisk']=[1 if (x>1)&(y>1) else 0 for x,y in zip(df.AVProductsInstalled,s)]
df_test['AV_lowrisk']=[1 if (x==0)&(y==0) else 0 for x,y in zip(df_test.AVProductsInstalled,r)]
Wall time: 22.5 s
df.columns[df.isnull().any()]
Index([], dtype='object')
df.shape
(8920724, 190)
df_test.shape
(7853253, 189)
df.memory_usage(deep=True).sum()
3573865091
df_test.memory_usage(deep=True).sum()
3115615234
df[numerical_columns].corr()
| Census_ProcessorCoreCount | Census_PrimaryDiskTotalCapacity | Census_SystemVolumeTotalCapacity | Census_TotalPhysicalRAM | Census_InternalPrimaryDiagonalDisplaySizeInInches | Census_InternalPrimaryDisplayResolutionHorizontal | Census_InternalPrimaryDisplayResolutionVertical | Census_InternalBatteryNumberOfCharges | |
|---|---|---|---|---|---|---|---|---|
| Census_ProcessorCoreCount | 1.000000 | -0.000187 | 0.081713 | 0.595529 | 0.183298 | 0.344943 | 0.315479 | 0.117078 |
| Census_PrimaryDiskTotalCapacity | -0.000187 | 1.000000 | 0.000787 | -0.000066 | -0.000059 | -0.000178 | -0.000250 | -0.000334 |
| Census_SystemVolumeTotalCapacity | 0.081713 | 0.000787 | 1.000000 | 0.153180 | 0.081792 | 0.019730 | -0.033649 | -0.001289 |
| Census_TotalPhysicalRAM | 0.595529 | -0.000066 | 0.153180 | 1.000000 | 0.241174 | 0.366244 | 0.336088 | 0.182585 |
| Census_InternalPrimaryDiagonalDisplaySizeInInches | 0.183298 | -0.000059 | 0.081792 | 0.241174 | 1.000000 | 0.320344 | 0.277455 | 0.513360 |
| Census_InternalPrimaryDisplayResolutionHorizontal | 0.344943 | -0.000178 | 0.019730 | 0.366244 | 0.320344 | 1.000000 | 0.901674 | 0.169044 |
| Census_InternalPrimaryDisplayResolutionVertical | 0.315479 | -0.000250 | -0.033649 | 0.336088 | 0.277455 | 0.901674 | 1.000000 | 0.230196 |
| Census_InternalBatteryNumberOfCharges | 0.117078 | -0.000334 | -0.001289 | 0.182585 | 0.513360 | 0.169044 | 0.230196 | 1.000000 |
max(df.Census_InternalPrimaryDisplayResolutionVertical)
8640.0
max(df.Census_InternalPrimaryDisplayResolutionHorizontal)
12288.0
df[df.Census_InternalPrimaryDisplayResolutionHorizontal==12288.0][numerical_columns]
| Census_ProcessorCoreCount | Census_PrimaryDiskTotalCapacity | Census_SystemVolumeTotalCapacity | Census_TotalPhysicalRAM | Census_InternalPrimaryDiagonalDisplaySizeInInches | Census_InternalPrimaryDisplayResolutionHorizontal | Census_InternalPrimaryDisplayResolutionVertical | Census_InternalBatteryNumberOfCharges | |
|---|---|---|---|---|---|---|---|---|
| 1072023 | 8.0 | 238475.0 | 237428.0 | 32768.0 | 54.59375 | 12288.0 | 2160.0 | 4.294967e+09 |
| 1252344 | 12.0 | 476940.0 | 476389.0 | 16384.0 | 46.00000 | 12288.0 | 2160.0 | 4.294967e+09 |
| 3152519 | 4.0 | 114473.0 | 113661.0 | 16384.0 | 47.40625 | 12288.0 | 2160.0 | 4.294967e+09 |
# Cap implausible display resolutions (a handful of rows report values like
# 12288x2160) at the train-set median; smaller values (including the -1
# sentinel, where the >= test is False) pass through unchanged.
# Vectorized np.where replaces four copy-pasted million-row list comprehensions;
# both frames are clipped with the SAME train-derived medians.
display_horizontal = np.median(df.Census_InternalPrimaryDisplayResolutionHorizontal)
display_vertical = np.median(df.Census_InternalPrimaryDisplayResolutionVertical)
for frame in (df, df_test):
    horiz = frame['Census_InternalPrimaryDisplayResolutionHorizontal']
    vert = frame['Census_InternalPrimaryDisplayResolutionVertical']
    frame['Census_InternalPrimaryDisplayResolutionHorizontal'] = np.where(horiz >= 10000.0, display_horizontal, horiz)
    frame['Census_InternalPrimaryDisplayResolutionVertical'] = np.where(vert >= 6000, display_vertical, vert)
# Interaction features.
# RAM per core — NOTE(review): Census_ProcessorCoreCount has min 0 in the
# describe() output, so this can produce inf; confirm downstream handling.
df['Interaction_01'] = df['Census_TotalPhysicalRAM'] / df['Census_ProcessorCoreCount']
df_test['Interaction_01'] = df_test['Census_TotalPhysicalRAM'] / df_test['Census_ProcessorCoreCount']
# Disk space outside the system volume.
df['Interaction_02'] = abs(df.Census_PrimaryDiskTotalCapacity - df.Census_SystemVolumeTotalCapacity)
df_test['Interaction_02'] = abs(df_test.Census_PrimaryDiskTotalCapacity - df_test.Census_SystemVolumeTotalCapacity)
# Total pixel count.
df['Interaction_03'] = np.multiply(df.Census_InternalPrimaryDisplayResolutionHorizontal, df.Census_InternalPrimaryDisplayResolutionVertical)
df_test['Interaction_03'] = np.multiply(df_test.Census_InternalPrimaryDisplayResolutionHorizontal, df_test.Census_InternalPrimaryDisplayResolutionVertical)
df=reduce_memory(df)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 330066788
df_test=reduce_memory(df_test)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 290570361
np.min(df.Census_InternalPrimaryDisplayResolutionHorizontal)
-1.0
np.min(df.Census_InternalPrimaryDisplayResolutionVertical)
-1.0
df[df.Census_InternalPrimaryDisplayResolutionHorizontal==-1][numerical_columns]
| Census_ProcessorCoreCount | Census_PrimaryDiskTotalCapacity | Census_SystemVolumeTotalCapacity | Census_TotalPhysicalRAM | Census_InternalPrimaryDiagonalDisplaySizeInInches | Census_InternalPrimaryDisplayResolutionHorizontal | Census_InternalPrimaryDisplayResolutionVertical | Census_InternalBatteryNumberOfCharges | |
|---|---|---|---|---|---|---|---|---|
| 86641 | 4.0 | 953869.0 | 911955.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 174431 | 4.0 | 953869.0 | 910716.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 245597 | 4.0 | 953869.0 | 912060.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 340295 | 2.0 | 476940.0 | 456362.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 403759 | 4.0 | 476940.0 | 51200.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 439181 | 4.0 | 953869.0 | 912402.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 540172 | 8.0 | 953869.0 | 939567.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 627565 | 6.0 | 953869.0 | 99019.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 695468 | 4.0 | 953869.0 | 911197.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1003210 | 4.0 | 953869.0 | 911997.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1133025 | 4.0 | 476940.0 | 433904.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1138140 | 4.0 | 29600.0 | 28920.0 | 1024.0 | 15.500000 | -1.0 | -1.0 | 1.400000e+01 |
| 1170739 | 4.0 | 476940.0 | 435167.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1173099 | 4.0 | 476940.0 | 433682.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1190003 | 8.0 | 953869.0 | 912728.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1240120 | 2.0 | 152627.0 | 101949.0 | 3072.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 1259038 | 4.0 | 953869.0 | 910677.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1301984 | 4.0 | 122104.0 | 106620.0 | 4096.0 | 11.601562 | -1.0 | -1.0 | 0.000000e+00 |
| 1304544 | 2.0 | 476940.0 | 466938.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 1349133 | 4.0 | 244198.0 | 102401.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1411395 | 4.0 | 29600.0 | 28482.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 1.900000e+01 |
| 1545770 | 2.0 | 122104.0 | 105636.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1578287 | 4.0 | 122104.0 | 120963.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1648906 | 4.0 | 122104.0 | 97721.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1654681 | 4.0 | 953869.0 | 461862.0 | 6144.0 | 13.898438 | -1.0 | -1.0 | 0.000000e+00 |
| 1792269 | 8.0 | 114473.0 | 113907.0 | 16384.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 1797716 | 8.0 | 122104.0 | 61440.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1814557 | 4.0 | 476940.0 | 460857.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1835472 | 4.0 | 476940.0 | 435687.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 1838169 | 4.0 | 953869.0 | 476159.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 7009604 | 4.0 | 953869.0 | 907644.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7058425 | 2.0 | 476940.0 | 435799.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7103069 | 2.0 | 305245.0 | 267829.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7143810 | 4.0 | 953869.0 | 913225.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7174106 | 2.0 | 953869.0 | 81921.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 7311245 | 2.0 | 476940.0 | 462394.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 8.300000e+01 |
| 7509075 | 2.0 | 305245.0 | 104448.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 7598414 | 4.0 | 122104.0 | 120817.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 2.800000e+01 |
| 7637536 | 2.0 | 305245.0 | 304693.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7801589 | 4.0 | 228936.0 | 123000.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 7841312 | 2.0 | 476940.0 | 460562.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7841507 | 4.0 | 953869.0 | 184207.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 7926866 | 4.0 | 715404.0 | 691297.0 | 6144.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 7976558 | 4.0 | 953869.0 | 380358.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 1.250000e+02 |
| 8038076 | 4.0 | 953869.0 | 912986.0 | 6144.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8100681 | 2.0 | 305245.0 | 289783.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8187745 | 2.0 | 476940.0 | 461258.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8247734 | 6.0 | 953869.0 | 249025.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 8273003 | 8.0 | 122104.0 | 121027.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 3.300000e+01 |
| 8380921 | 2.0 | 305245.0 | 99495.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 8381285 | 4.0 | 29600.0 | 28920.0 | 1024.0 | 15.500000 | -1.0 | -1.0 | 9.000000e+00 |
| 8430613 | 2.0 | 152587.0 | 151614.0 | 2048.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 8463891 | 2.0 | 953869.0 | 285672.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
| 8476044 | 4.0 | 953869.0 | 234246.0 | 8192.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8528644 | 2.0 | 114473.0 | 113920.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8563916 | 4.0 | 953869.0 | 910611.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8653632 | 4.0 | 476940.0 | 435447.0 | 4096.0 | 23.000000 | -1.0 | -1.0 | 0.000000e+00 |
| 8694644 | 4.0 | 953869.0 | 912461.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8741691 | 4.0 | 22902.0 | 22335.0 | 6144.0 | 15.500000 | -1.0 | -1.0 | 0.000000e+00 |
| 8759520 | 2.0 | 114473.0 | 113500.0 | 4096.0 | 15.500000 | -1.0 | -1.0 | 4.294967e+09 |
156 rows × 8 columns
# Replace the -1 sentinel in the display-resolution columns with the
# fallback values (display_horizontal / display_vertical) computed earlier.
# Vectorized Series.mask replaces the original Python-level list
# comprehensions, which iterated ~9M rows per column in interpreted code.
for frame in (df, df_test):
    horiz = frame['Census_InternalPrimaryDisplayResolutionHorizontal']
    frame['Census_InternalPrimaryDisplayResolutionHorizontal'] = horiz.mask(horiz == -1, display_horizontal)
    vert = frame['Census_InternalPrimaryDisplayResolutionVertical']
    frame['Census_InternalPrimaryDisplayResolutionVertical'] = vert.mask(vert == -1, display_vertical)
# Summary stats for battery charge counts; note the 4.294967e+09
# (uint32 max) sentinel dominating the max and 75% quantile below.
df.Census_InternalBatteryNumberOfCharges.describe()
count 8.920724e+06 mean 1.089876e+09 std 1.925254e+09 min 0.000000e+00 25% 0.000000e+00 50% 0.000000e+00 75% 4.294967e+09 max 4.294967e+09 Name: Census_InternalBatteryNumberOfCharges, dtype: float64
# Most frequent values: mostly 0 and the uint32-max sentinel.
df.Census_InternalBatteryNumberOfCharges.value_counts().head()
0.000000e+00 5321817 4.294967e+09 2263693 1.000000e+00 53810 2.000000e+00 28128 1.600000e+01 27348 Name: Census_InternalBatteryNumberOfCharges, dtype: int64
# Pixels-per-inch feature: diagonal length in pixels divided by the
# physical diagonal size in inches.
for frame in (df, df_test):
    horiz = frame.Census_InternalPrimaryDisplayResolutionHorizontal
    vert = frame.Census_InternalPrimaryDisplayResolutionVertical
    diag_pixels = np.sqrt(np.power(horiz, 2) + np.power(vert, 2))
    frame['PPI'] = diag_pixels / frame.Census_InternalPrimaryDiagonalDisplaySizeInInches
# Re-downcast dtypes now that new (float64) engineered columns exist.
df=reduce_memory(df)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 160573032
# Same dtype downcasting for the test set.
df_test=reduce_memory(df_test)
Changing Datatypes please wait....... Datatypes updated and memory usage is reduced by : 141358554
# Report post-downcast memory footprints (deep=True counts object contents).
train_bytes = df.memory_usage(deep=True).sum()
test_bytes = df_test.memory_usage(deep=True).sum()
print("Memory Usage of training set in Bytes : ", train_bytes)
print("Memory Usage of test set in Bytes : ", test_bytes)
Memory Usage of training set in Bytes : 3511420023 Memory Usage of test set in Bytes : 3060642463
# EngineVersion levels present in test but never seen in train.
set(df_test['EngineVersion'].unique()).difference(df['EngineVersion'].unique())
{-1}
%%time
different_columns=[]
for i in range(1,len(df_test.columns)):
if df.columns[i] in ['MachineIdentifier','HasDetections']:
continue
elif len(set(df_test[df.columns[i]].unique())-set(df[df.columns[i]].unique()))==0:
print(df.columns[i])
continue
else:
different_columns.append(df.columns[i])
ProductName IsBeta IsSxsPassiveMode AVProductsInstalled AVProductsEnabled HasTpm CountryIdentifier LocaleEnglishNameIdentifier Platform Processor OsSuite OsPlatformSubRelease SkuEdition IsProtected AutoSampleOptIn SMode Firewall Census_DeviceFamily Census_ProcessorManufacturerIdentifier Census_PrimaryDiskTypeName Census_HasOpticalDiskDrive Census_PowerPlatformRoleName Census_OSArchitecture Census_OSInstallTypeName Census_OSInstallLanguageIdentifier Census_OSWUAutoUpdateOptionsName Census_IsPortableOperatingSystem Census_GenuineStateName Census_ActivationChannel Census_IsFlightsDisabled Census_ThresholdOptIn Census_IsSecureBootEnabled Census_IsWIMBootEnabled Census_IsVirtualDevice Census_IsTouchEnabled Census_IsPenCapable Census_IsAlwaysOnAlwaysConnectedCapable Wdft_IsGamer Wdft_RegionIdentifier Month_1 Month_2 Month_3 Month_4 Month_5 Month_6 Month_7 Month_8 Month_9 Month_10 Month_11 Month_12 Year_14 Year_15 Year_16 Year_17 Year_18 Interaction_MYYear_9Month_1 Interaction_MYYear_9Month_2 Interaction_MYYear_9Month_3 Interaction_MYYear_9Month_4 Interaction_MYYear_9Month_5 Interaction_MYYear_9Month_6 Interaction_MYYear_9Month_9 Interaction_MYYear_9Month_10 Interaction_MYYear_9Month_11 Interaction_MYYear_10Month_1 Interaction_MYYear_10Month_3 Interaction_MYYear_10Month_4 Interaction_MYYear_10Month_5 Interaction_MYYear_10Month_7 Interaction_MYYear_10Month_8 Interaction_MYYear_10Month_12 Interaction_MYYear_11Month_1 Interaction_MYYear_11Month_2 Interaction_MYYear_11Month_3 Interaction_MYYear_11Month_5 Interaction_MYYear_11Month_7 Interaction_MYYear_11Month_8 Interaction_MYYear_11Month_9 Interaction_MYYear_11Month_12 Interaction_MYYear_12Month_1 Interaction_MYYear_12Month_6 Interaction_MYYear_12Month_7 Interaction_MYYear_12Month_9 Interaction_MYYear_12Month_10 Interaction_MYYear_12Month_11 Interaction_MYYear_12Month_12 Interaction_MYYear_13Month_2 Interaction_MYYear_13Month_4 Interaction_MYYear_13Month_6 Interaction_MYYear_13Month_11 Interaction_MYYear_13Month_12 
Interaction_MYYear_14Month_1 Interaction_MYYear_14Month_3 Interaction_MYYear_14Month_5 Interaction_MYYear_14Month_6 Interaction_MYYear_14Month_7 Interaction_MYYear_14Month_9 Interaction_MYYear_14Month_10 Interaction_MYYear_14Month_11 Interaction_MYYear_15Month_1 Interaction_MYYear_15Month_3 Interaction_MYYear_15Month_6 Interaction_MYYear_15Month_7 Interaction_MYYear_15Month_8 Interaction_MYYear_15Month_9 Interaction_MYYear_15Month_10 Interaction_MYYear_15Month_11 Interaction_MYYear_16Month_1 Interaction_MYYear_16Month_2 Interaction_MYYear_16Month_3 Interaction_MYYear_16Month_4 Interaction_MYYear_16Month_5 Interaction_MYYear_16Month_6 Interaction_MYYear_16Month_7 Interaction_MYYear_16Month_8 Interaction_MYYear_16Month_9 Interaction_MYYear_16Month_10 Interaction_MYYear_16Month_11 Interaction_MYYear_16Month_12 Interaction_MYYear_17Month_1 Interaction_MYYear_17Month_2 Interaction_MYYear_17Month_3 Interaction_MYYear_17Month_4 Interaction_MYYear_17Month_5 Interaction_MYYear_17Month_6 Interaction_MYYear_17Month_7 Interaction_MYYear_17Month_8 Interaction_MYYear_17Month_9 Interaction_MYYear_17Month_10 Interaction_MYYear_17Month_11 Interaction_MYYear_17Month_12 Interaction_MYYear_18Month_1 Interaction_MYYear_18Month_2 Interaction_MYYear_18Month_3 Interaction_MYYear_18Month_4 Interaction_MYYear_18Month_5 Interaction_MYYear_18Month_6 Interaction_MYYear_18Month_7 Interaction_MYYear_18Month_8 Interaction_MYYear_18Month_9 Interaction_MYYear_18Month_12 Device_PossibleOwnership AV_highrisk AV_mediumrisk AV_lowrisk Wall time: 39.7 s
# Per-column report of test-only categories. The expensive
# set(unique)-set(unique) difference is computed once per column here;
# the original recomputed it for both the count and the row filter.
for col in different_columns:
    unseen = set(df_test[col].unique()) - set(df[col].unique())
    print("Number of Missing Values in Test set for "+str(col)+" : ", len(unseen))
    print("Number of Categories : ", df[col].nunique())
    print("Number of rows with missing categories in test data , ", df_test[df_test[col].isin(list(unseen))].shape[0])
    print("\n")
Number of Missing Values in Test set for EngineVersion : 1 Number of Categories : 70 Number of rows with missing categories in test data , 3619770 Number of Missing Values in Test set for AppVersion : 10 Number of Categories : 107 Number of rows with missing categories in test data , 2306148 Number of Missing Values in Test set for AvSigVersion : 1 Number of Categories : 8531 Number of rows with missing categories in test data , 6598309 Number of Missing Values in Test set for RtpStateBitfield : 1 Number of Categories : 7 Number of rows with missing categories in test data , 1 Number of Missing Values in Test set for AVProductStatesIdentifier : 1 Number of Categories : 28969 Number of rows with missing categories in test data , 16886 Number of Missing Values in Test set for CityIdentifier : 1 Number of Categories : 107366 Number of rows with missing categories in test data , 52481 Number of Missing Values in Test set for OrganizationIdentifier : 1 Number of Categories : 49 Number of rows with missing categories in test data , 7 Number of Missing Values in Test set for GeoNameIdentifier : 1 Number of Categories : 292 Number of rows with missing categories in test data , 4 Number of Missing Values in Test set for OsVer : 1 Number of Categories : 58 Number of rows with missing categories in test data , 13 Number of Missing Values in Test set for OsBuild : 20 Number of Categories : 61 Number of rows with missing categories in test data , 7350 Number of Missing Values in Test set for OsBuildLab : 1 Number of Categories : 663 Number of rows with missing categories in test data , 95381 Number of Missing Values in Test set for IeVerIdentifier : 1 Number of Categories : 303 Number of rows with missing categories in test data , 27338 Number of Missing Values in Test set for SmartScreen : 1 Number of Categories : 7 Number of rows with missing categories in test data , 163164 Number of Missing Values in Test set for UacLuaenable : 1 Number of Categories : 11 Number of rows 
with missing categories in test data , 2 Number of Missing Values in Test set for Census_MDC2FormFactor : 1 Number of Categories : 13 Number of rows with missing categories in test data , 1 Number of Missing Values in Test set for Census_OEMNameIdentifier : 48 Number of Categories : 2137 Number of rows with missing categories in test data , 147 Number of Missing Values in Test set for Census_OEMModelIdentifier : 1 Number of Categories : 175347 Number of rows with missing categories in test data , 80321 Number of Missing Values in Test set for Census_ProcessorCoreCount : 4 Number of Categories : 45 Number of rows with missing categories in test data , 4 Number of Missing Values in Test set for Census_ProcessorModelIdentifier : 1 Number of Categories : 2583 Number of rows with missing categories in test data , 426 Number of Missing Values in Test set for Census_PrimaryDiskTotalCapacity : 3062 Number of Categories : 5734 Number of rows with missing categories in test data , 3592 Number of Missing Values in Test set for Census_SystemVolumeTotalCapacity : 99275 Number of Categories : 536839 Number of rows with missing categories in test data , 139515 Number of Missing Values in Test set for Census_TotalPhysicalRAM : 1830 Number of Categories : 3446 Number of rows with missing categories in test data , 2363 Number of Missing Values in Test set for Census_ChassisTypeName : 1 Number of Categories : 52 Number of rows with missing categories in test data , 6 Number of Missing Values in Test set for Census_InternalPrimaryDiagonalDisplaySizeInInches : 84 Number of Categories : 785 Number of rows with missing categories in test data , 116 Number of Missing Values in Test set for Census_InternalPrimaryDisplayResolutionHorizontal : 386 Number of Categories : 2046 Number of rows with missing categories in test data , 641 Number of Missing Values in Test set for Census_InternalPrimaryDisplayResolutionVertical : 233 Number of Categories : 1549 Number of rows with missing categories 
in test data , 408 Number of Missing Values in Test set for Census_InternalBatteryNumberOfCharges : 11749 Number of Categories : 41087 Number of rows with missing categories in test data , 17043 Number of Missing Values in Test set for Census_OSVersion : 1 Number of Categories : 469 Number of rows with missing categories in test data , 2741713 Number of Missing Values in Test set for Census_OSBranch : 1 Number of Categories : 32 Number of rows with missing categories in test data , 14 Number of Missing Values in Test set for Census_OSBuildNumber : 24 Number of Categories : 130 Number of rows with missing categories in test data , 7783 Number of Missing Values in Test set for Census_OSBuildRevision : 1 Number of Categories : 285 Number of rows with missing categories in test data , 2599915 Number of Missing Values in Test set for Census_OSEdition : 1 Number of Categories : 31 Number of rows with missing categories in test data , 2 Number of Missing Values in Test set for Census_OSSkuName : 1 Number of Categories : 30 Number of rows with missing categories in test data , 2 Number of Missing Values in Test set for Census_OSUILocaleIdentifier : 1 Number of Categories : 143 Number of rows with missing categories in test data , 2 Number of Missing Values in Test set for Census_FlightRing : 1 Number of Categories : 10 Number of rows with missing categories in test data , 1 Number of Missing Values in Test set for Census_FirmwareManufacturerIdentifier : 1 Number of Categories : 712 Number of rows with missing categories in test data , 309 Number of Missing Values in Test set for Census_FirmwareVersionIdentifier : 1 Number of Categories : 6569 Number of rows with missing categories in test data , 1688 Number of Missing Values in Test set for Interaction_MYYear_9Month_8 : 1 Number of Categories : 1 Number of rows with missing categories in test data , 1 Number of Missing Values in Test set for Interaction_MYYear_12Month_2 : 1 Number of Categories : 1 Number of rows with 
missing categories in test data , 1 Number of Missing Values in Test set for Interaction_MYYear_14Month_4 : 1 Number of Categories : 1 Number of rows with missing categories in test data , 1 Number of Missing Values in Test set for Interaction_MYYear_18Month_10 : 1 Number of Categories : 1 Number of rows with missing categories in test data , 15364 Number of Missing Values in Test set for Interaction_MYYear_18Month_11 : 1 Number of Categories : 1 Number of rows with missing categories in test data , 2025 Number of Missing Values in Test set for Interaction_01 : 2593 Number of Categories : 4465 Number of rows with missing categories in test data , 3169 Number of Missing Values in Test set for Interaction_02 : 140338 Number of Categories : 565079 Number of rows with missing categories in test data , 200917 Number of Missing Values in Test set for Interaction_03 : 7805 Number of Categories : 9615 Number of rows with missing categories in test data , 8530
# The raw numerical columns were superseded by engineered features; drop
# them from both frames, then peek at the result.
for frame in (df, df_test):
    frame.drop(columns=numerical_columns, inplace=True)
df.head()
| MachineIdentifier | ProductName | EngineVersion | AppVersion | AvSigVersion | IsBeta | RtpStateBitfield | IsSxsPassiveMode | AVProductStatesIdentifier | AVProductsInstalled | ... | Interaction_MYYear_18Month_11 | Interaction_MYYear_18Month_12 | Device_PossibleOwnership | AV_highrisk | AV_mediumrisk | AV_lowrisk | Interaction_01 | Interaction_02 | Interaction_03 | PPI | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0000028988387b115f69f31a3bf04f09 | 0.0 | 0 | 0.0 | 0 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 1024.0 | 177489.0 | 1296000.0 | 89.8125 |
| 1 | 000007535c3f730efa9ea0b7ef1bd645 | 0.0 | 1 | 5.0 | 1 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 1024.0 | 374555.0 | 1049088.0 | 112.7500 |
| 2 | 000007905a28d863f6d0d597892cd692 | 0.0 | 0 | 0.0 | 2 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 1024.0 | 566.0 | 2073600.0 | 102.4375 |
| 3 | 00000b11598a75ea8ba1beea8459149f | 0.0 | 0 | 0.0 | 3 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 1 | 0 | 1 | 0 | 1024.0 | 11359.0 | 1049088.0 | 84.6875 |
| 4 | 000014a5f00daa18e76b81417eeb99fc | 0.0 | 0 | 0.0 | 4 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 1536.0 | 375040.0 | 1049088.0 | 111.9375 |
5 rows × 186 columns
# Test frame mirrors train minus the HasDetections target (185 vs 186 cols).
df_test.head()
| MachineIdentifier | ProductName | EngineVersion | AppVersion | AvSigVersion | IsBeta | RtpStateBitfield | IsSxsPassiveMode | AVProductStatesIdentifier | AVProductsInstalled | ... | Interaction_MYYear_18Month_11 | Interaction_MYYear_18Month_12 | Device_PossibleOwnership | AV_highrisk | AV_mediumrisk | AV_lowrisk | Interaction_01 | Interaction_02 | Interaction_03 | PPI | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0000010489e3af074adeac69c53e555e | 0.0 | -1 | 2.0 | -1 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 2048.0 | 365207.0 | 2073600.0 | 142.1250 |
| 1 | 00000176ac758d54827acd545b6315a5 | 0.0 | -1 | 1.0 | -1 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 2048.0 | 25377.0 | 1049088.0 | 101.1250 |
| 2 | 0000019dcefc128c2d4387c1273dae1d | 0.0 | 3 | 1.0 | -1 | 0 | 0.0 | 0 | 14 | 2.0 | ... | 0 | 0 | 0 | 0 | 0 | 0 | 2048.0 | 1142.0 | 921600.0 | 105.6875 |
| 3 | 0000055553dc51b1295785415f1a224d | 0.0 | -1 | 2.0 | -1 | 0 | 0.0 | 0 | 79 | 2.0 | ... | 0 | 0 | 0 | 0 | 0 | 0 | 2048.0 | 37595.0 | 1049088.0 | 111.9375 |
| 4 | 00000574cefffeca83ec8adf9285b2bf | 0.0 | -1 | 1.0 | -1 | 0 | 0.0 | 0 | 0 | 1.0 | ... | 0 | 0 | 0 | 0 | 1 | 0 | 512.0 | 15434.0 | 1049088.0 | 101.1250 |
5 rows × 185 columns
# Sanity check: no column should still contain NaNs (expect empty Index).
df.columns[df.isnull().any()]
Index([], dtype='object')
# The AV install/enable counts were folded into the AV_*risk flags; drop
# the originals from both frames.
for frame in (df, df_test):
    frame.drop(columns=['AVProductsEnabled', 'AVProductsInstalled'], inplace=True)
#df,df_test=frequency_encode(df=df,df_test=df_test,col='Year')
#df,df_test=frequency_encode(df=df,df_test=df_test,col='Month')
df.shape
(8920724, 184)
# One fewer column than train (no HasDetections target).
df_test.shape
(7853253, 183)
# Drop the one-hot Year/Month columns (their Interaction_* combinations
# remain). Collecting the present columns first and calling drop() once
# avoids the original's per-column drop, each of which copies frame
# internals; behavior is identical when both frames share the columns.
to_drop = [col for col in list(year_columns) + list(month_columns) if col in df.columns]
df.drop(columns=to_drop, inplace=True)
df_test.drop(columns=to_drop, inplace=True)
# Only the target column should differ between the two schemas.
set(df.columns)-set(df_test.columns)
{'HasDetections'}
%%time
for col in df.columns[1:]:
if col in ['MachineIdentifier','HasDetections']:
continue
elif df[col].nunique()==1:
print(col)
df.drop(columns=[col],inplace=True)
df_test.drop(columns=[col],inplace=True)
else:
continue
Interaction_MYYear_9Month_1 Interaction_MYYear_9Month_2 Interaction_MYYear_9Month_3 Interaction_MYYear_9Month_4 Interaction_MYYear_9Month_5 Interaction_MYYear_9Month_6 Interaction_MYYear_9Month_8 Interaction_MYYear_9Month_9 Interaction_MYYear_9Month_10 Interaction_MYYear_9Month_11 Interaction_MYYear_10Month_1 Interaction_MYYear_10Month_3 Interaction_MYYear_10Month_4 Interaction_MYYear_10Month_5 Interaction_MYYear_10Month_7 Interaction_MYYear_10Month_8 Interaction_MYYear_10Month_12 Interaction_MYYear_11Month_1 Interaction_MYYear_11Month_2 Interaction_MYYear_11Month_3 Interaction_MYYear_11Month_5 Interaction_MYYear_11Month_7 Interaction_MYYear_11Month_8 Interaction_MYYear_11Month_9 Interaction_MYYear_11Month_12 Interaction_MYYear_12Month_1 Interaction_MYYear_12Month_2 Interaction_MYYear_12Month_6 Interaction_MYYear_12Month_7 Interaction_MYYear_12Month_9 Interaction_MYYear_12Month_10 Interaction_MYYear_12Month_11 Interaction_MYYear_12Month_12 Interaction_MYYear_13Month_2 Interaction_MYYear_13Month_4 Interaction_MYYear_13Month_6 Interaction_MYYear_13Month_11 Interaction_MYYear_13Month_12 Interaction_MYYear_14Month_1 Interaction_MYYear_14Month_4 Interaction_MYYear_14Month_5 Interaction_MYYear_14Month_6 Interaction_MYYear_14Month_9 Interaction_MYYear_14Month_11 Interaction_MYYear_15Month_6 Interaction_MYYear_18Month_10 Interaction_MYYear_18Month_11 Interaction_MYYear_18Month_12 Wall time: 3min 2s
# Reorder columns so MachineIdentifier is first and the HasDetections
# target is last (positional iloc slicing below depends on this layout).
# A comprehension replaces the original append loop; `l` is kept in the
# namespace for any later cell that reads it.
l = ['MachineIdentifier'] + [col for col in df.columns if col not in ('MachineIdentifier', 'HasDetections')] + ['HasDetections']
df = df[l]
#train=df.sample(frac=0.7,random_state=1)
#train.drop(columns=['AutoSampleOptIn','IsBeta','Census_IsPortableOperatingSystem','Census_IsFlightsDisabled'],inplace=True)
#df.drop(columns=['AutoSampleOptIn','IsBeta','Census_IsPortableOperatingSystem','Census_IsFlightsDisabled'],inplace=True)
# Drop the near-constant flag columns from the test frame only; the train
# equivalents above remain commented out.
flag_columns = ['AutoSampleOptIn', 'IsBeta', 'Census_IsPortableOperatingSystem', 'Census_IsFlightsDisabled']
df_test.drop(columns=flag_columns, inplace=True)
df.shape
#df.drop(columns=categorical_columns,inplace=True)
#df_test.drop(columns=categorical_columns,inplace=True)
#train.drop(columns=['AvSigVersion'],inplace=True)
# Remove the very-high-cardinality AvSigVersion column from both frames,
# then carve out a reproducible 70% training sample.
for frame in (df, df_test):
    frame.drop(columns=['AvSigVersion'], inplace=True)
train = df.sample(frac=0.7, random_state=1)
X = train.iloc[:, 1:-1]   # features: everything between the id and the target
y = train.iloc[:, -1]     # target: HasDetections
from mlxtend.plotting import plot_learning_curves
from mlxtend.plotting import plot_decision_regions
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import BaggingClassifier
import matplotlib.gridspec as gridspec
from sklearn.model_selection import cross_val_score, train_test_split
from sklearn.preprocessing import StandardScaler
# Fit the scaler on the training features only; the test set is later
# transformed with these same statistics, avoiding train/test leakage.
ss=StandardScaler()
ss.fit(X)
StandardScaler(copy=True, with_mean=True, with_std=True)
# Standardize the training features, rebuilding the DataFrame so the
# column names survive the ndarray round-trip, then confirm every
# feature column also exists in the test frame (expect an empty set).
X = pd.DataFrame(data=ss.transform(X), columns=list(X.columns))
set(X.columns).difference(df_test.columns)
set()
# Apply the train-fitted scaler to the test set, restricted to the
# training feature columns (this drops MachineIdentifier and aligns order).
feature_names = X.columns.tolist()
df_test = pd.DataFrame(data=ss.transform(df_test[feature_names]), columns=feature_names)
#X.drop(columns=['Platform_w10', 'Platform_w2016', 'Platform_w8'],inplace=True)
X.shape
(6244507, 112)
%%time
fig,ax=plt.subplots(figsize=(12,12))
corr=X.corr()
# plot the heatmap
sns.heatmap(corr,xticklabels=corr.columns,yticklabels=corr.columns,ax=ax)
Wall time: 2min 57s
# Diverging red-to-blue palette for the styled correlation table below.
cmap=sns.diverging_palette(5, 250, as_cmap=True)
def magnify():
    """Return CSS table styles that enlarge cells on hover.

    Used with pandas ``Styler.set_table_styles`` so the large correlation
    matrix stays compact (7pt headers, zero padding) but individual cells
    and headers grow to 12pt when the mouse hovers over them.
    """
    small_headers = dict(selector="th", props=[("font-size", "7pt")])
    tight_cells = dict(selector="td", props=[("padding", "0em 0em")])
    hover_header = dict(selector="th:hover", props=[("font-size", "12pt")])
    hover_cell = dict(
        selector="tr:hover td:hover",
        props=[("max-width", "200px"), ("font-size", "12pt")],
    )
    return [small_headers, tight_cells, hover_header, hover_cell]
# Render the correlation matrix as a styled HTML table: diverging color
# gradient per row, compact cells, and magnify()'s hover-to-zoom CSS.
# Fixed caption typo: "magify" -> "magnify".
corr.style.background_gradient(cmap, axis=1)\
    .set_properties(**{'max-width': '80px', 'font-size': '10pt'})\
    .set_caption("Hover to magnify")\
    .set_precision(2)\
    .set_table_styles(magnify())
C:\Users\gandh\Anaconda3\lib\site-packages\matplotlib\colors.py:504: RuntimeWarning: invalid value encountered in less xa[xa < 0] = -1
| ProductName | EngineVersion | AppVersion | RtpStateBitfield | IsSxsPassiveMode | AVProductStatesIdentifier | HasTpm | CountryIdentifier | CityIdentifier | OrganizationIdentifier | GeoNameIdentifier | LocaleEnglishNameIdentifier | Platform | Processor | OsVer | OsBuild | OsSuite | OsPlatformSubRelease | OsBuildLab | SkuEdition | IsProtected | SMode | IeVerIdentifier | SmartScreen | Firewall | UacLuaenable | Census_MDC2FormFactor | Census_DeviceFamily | Census_OEMNameIdentifier | Census_OEMModelIdentifier | Census_ProcessorManufacturerIdentifier | Census_ProcessorModelIdentifier | Census_PrimaryDiskTypeName | Census_HasOpticalDiskDrive | Census_ChassisTypeName | Census_PowerPlatformRoleName | Census_OSVersion | Census_OSArchitecture | Census_OSBranch | Census_OSBuildNumber | Census_OSBuildRevision | Census_OSEdition | Census_OSSkuName | Census_OSInstallTypeName | Census_OSInstallLanguageIdentifier | Census_OSUILocaleIdentifier | Census_OSWUAutoUpdateOptionsName | Census_GenuineStateName | Census_ActivationChannel | Census_FlightRing | Census_ThresholdOptIn | Census_FirmwareManufacturerIdentifier | Census_FirmwareVersionIdentifier | Census_IsSecureBootEnabled | Census_IsWIMBootEnabled | Census_IsVirtualDevice | Census_IsTouchEnabled | Census_IsPenCapable | Census_IsAlwaysOnAlwaysConnectedCapable | Wdft_IsGamer | Wdft_RegionIdentifier | Interaction_MYYear_14Month_3 | Interaction_MYYear_14Month_7 | Interaction_MYYear_14Month_10 | Interaction_MYYear_15Month_1 | Interaction_MYYear_15Month_3 | Interaction_MYYear_15Month_7 | Interaction_MYYear_15Month_8 | Interaction_MYYear_15Month_9 | Interaction_MYYear_15Month_10 | Interaction_MYYear_15Month_11 | Interaction_MYYear_16Month_1 | Interaction_MYYear_16Month_2 | Interaction_MYYear_16Month_3 | Interaction_MYYear_16Month_4 | Interaction_MYYear_16Month_5 | Interaction_MYYear_16Month_6 | Interaction_MYYear_16Month_7 | Interaction_MYYear_16Month_8 | Interaction_MYYear_16Month_9 | Interaction_MYYear_16Month_10 | 
Interaction_MYYear_16Month_11 | Interaction_MYYear_16Month_12 | Interaction_MYYear_17Month_1 | Interaction_MYYear_17Month_2 | Interaction_MYYear_17Month_3 | Interaction_MYYear_17Month_4 | Interaction_MYYear_17Month_5 | Interaction_MYYear_17Month_6 | Interaction_MYYear_17Month_7 | Interaction_MYYear_17Month_8 | Interaction_MYYear_17Month_9 | Interaction_MYYear_17Month_10 | Interaction_MYYear_17Month_11 | Interaction_MYYear_17Month_12 | Interaction_MYYear_18Month_1 | Interaction_MYYear_18Month_2 | Interaction_MYYear_18Month_3 | Interaction_MYYear_18Month_4 | Interaction_MYYear_18Month_5 | Interaction_MYYear_18Month_6 | Interaction_MYYear_18Month_7 | Interaction_MYYear_18Month_8 | Interaction_MYYear_18Month_9 | Device_PossibleOwnership | AV_highrisk | AV_mediumrisk | AV_lowrisk | Interaction_01 | Interaction_02 | Interaction_03 | PPI | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| ProductName | 1 | -0.0078 | 0.086 | -0.0079 | -0.014 | 0.046 | -0.94 | -0.028 | 0.023 | -0.00019 | -0.022 | -0.0054 | 0.29 | 0.073 | 0.3 | 0.37 | -0.028 | 0.4 | 0.28 | 0.2 | 0.0095 | -0.0021 | 0.38 | -0.028 | 0.0096 | 0.17 | -0.021 | -0.0038 | 0.0063 | 0.037 | 0.0082 | 0.046 | 0.03 | -0.0049 | -0.006 | 0.014 | 0.092 | 0.072 | 0.037 | 0.15 | 0.094 | -0.042 | -0.037 | 0.011 | 0.0048 | 0.014 | 0.008 | -0.012 | -0.093 | -0.019 | 0.0006 | 0.012 | 0.024 | -0.093 | nan | -0.0044 | -0.03 | -0.017 | -0.025 | -0.062 | 0.047 | 0.017 | 0.0016 | -0.00083 | 0.0098 | 0.046 | -0.0041 | -0.0027 | 0.002 | -0.0019 | -0.0041 | 0.0045 | 0.0064 | 0.016 | 0.11 | -0.0012 | -0.0038 | -0.0086 | -0.004 | 0.014 | 0.0069 | -0.0052 | -0.0073 | -0.00093 | 0.037 | -0.031 | 0.0036 | 0.059 | -0.016 | -0.0017 | 0.039 | -0.033 | -0.0044 | -0.0055 | 0.01 | 0.084 | -0.005 | 0.0031 | -0.084 | -0.022 | 0.25 | -0.0086 | 0.32 | -0.0024 | -0.031 | -4.1e-05 | 0.022 | -0.01 | -0.014 | -5.9e-05 | -0.027 | -0.04 |
| EngineVersion | -0.0078 | 1 | 0.31 | 0.017 | -0.0036 | 0.04 | 0.0042 | 0.00037 | -0.0014 | -0.005 | 0.0013 | -0.00021 | 0.00091 | 0.034 | 0.0046 | 0.1 | 0.032 | 0.11 | 0.13 | -0.032 | -0.23 | -0.00068 | 0.11 | -0.021 | 0.0044 | -0.0049 | -0.013 | -0.011 | -0.0016 | -0.00088 | 0.011 | 0.0063 | -0.018 | 0.0026 | 0.0069 | -0.022 | 0.14 | 0.034 | 0.11 | 0.11 | 0.14 | -0.0052 | 0.014 | 0.037 | -0.0041 | -0.013 | -0.0029 | 0.0075 | -0.0094 | 0.019 | 0.0011 | 0.011 | -0.007 | 0.02 | nan | -0.0025 | 0.0015 | -0.012 | 0.0016 | -0.084 | 0.001 | 0.0043 | 0.0043 | 0.0043 | 0.0031 | 0.0022 | 0.015 | 0.0076 | 0.0047 | 0.0076 | 0.017 | 0.011 | 0.02 | 0.027 | 0.022 | 0.028 | 0.022 | 0.013 | 0.036 | 0.037 | 0.052 | 0.032 | 0.048 | 0.0011 | 0.013 | 0.059 | 0.041 | 0.0092 | 0.028 | 0.055 | 0.011 | 0.069 | 0.02 | 0.028 | 0.021 | 0.015 | 0.015 | -0.0019 | -0.12 | -0.041 | -0.042 | -0.011 | 0.011 | 0.012 | -0.021 | -2.9e-05 | -0.09 | 0.03 | -0.036 | -4.1e-05 | -0.045 | -0.0067 |
| AppVersion | 0.086 | 0.31 | 1 | -0.0011 | -0.021 | 0.041 | -0.081 | -0.026 | 0.0037 | -0.0082 | -0.026 | -0.015 | 0.084 | 0.071 | 0.09 | 0.39 | 0.021 | 0.41 | 0.43 | 0.05 | -0.2 | -0.0093 | 0.47 | -0.016 | -0.0013 | 0.015 | -0.0026 | -0.013 | 0.0021 | -0.0011 | 0.011 | 0.013 | -0.0018 | 0.0038 | 0.0083 | -0.022 | 0.37 | 0.071 | 0.32 | 0.39 | 0.36 | 0.05 | 0.039 | 0.067 | -0.023 | -0.034 | -0.037 | 0.054 | 0.02 | 0.075 | 0.0024 | 0.015 | -0.0075 | 0.0085 | nan | -0.0026 | 0.0054 | -0.022 | 0.015 | -0.12 | 0.0067 | 0.048 | 0.036 | 0.051 | 0.034 | 0.033 | 0.13 | 0.057 | 0.03 | 0.069 | 0.048 | 0.053 | 0.065 | 0.056 | 0.085 | 0.044 | 0.11 | 0.056 | 0.12 | 0.11 | 0.16 | 0.063 | 0.06 | 0.029 | 0.03 | 0.15 | 0.13 | 0.0066 | 0.095 | 0.18 | 0.0082 | 0.033 | 0.04 | 0.05 | 0.031 | 0.015 | 0.028 | -0.002 | -0.29 | -0.11 | 0.043 | 0.028 | 0.0096 | 0.026 | -0.0028 | -0.00023 | -0.099 | 0.026 | -0.064 | -8.3e-05 | -0.083 | -0.011 |
| RtpStateBitfield | -0.0079 | 0.017 | -0.0011 | 1 | 0.53 | 0.11 | 0.0081 | -0.00042 | -0.0021 | 4.3e-05 | 0.004 | 0.00062 | -0.00091 | -0.012 | -0.00052 | -0.019 | 0.012 | -0.021 | -0.015 | -0.015 | -0.14 | -0.0026 | -0.022 | 0.02 | -0.0091 | 0.0044 | -0.015 | -0.00012 | -0.0074 | -0.0028 | 0.00034 | -0.0062 | -0.012 | 0.0029 | -0.0059 | -0.013 | -0.0098 | -0.012 | -0.02 | -0.02 | -0.0094 | 0.0034 | 0.01 | 0.0006 | 0.0045 | 0.00037 | 0.0088 | -0.0086 | 0.0077 | 0.00013 | -0.00053 | -0.0056 | -0.0049 | 0.017 | nan | -0.0079 | -0.0087 | -0.0054 | -0.018 | 0.01 | -0.007 | -0.00092 | -0.00091 | -0.001 | -0.00046 | -0.00039 | -0.0074 | -0.0033 | -0.0018 | -0.0064 | -0.0048 | -0.004 | -0.0052 | -0.0032 | -0.0039 | -0.0035 | -0.0039 | 0.0014 | -0.0033 | -0.0012 | -0.0028 | 2.7e-05 | 0.0024 | -0.00022 | 0.00032 | -0.0062 | -0.0017 | -0.00023 | -0.015 | -0.0033 | 0.00023 | -0.0047 | -0.00098 | 0.0012 | -0.0024 | -0.00054 | -0.0013 | 0.00024 | 0.011 | 0.01 | -0.0025 | 0.0015 | -0.0018 | -0.00016 | 0.016 | 0.0048 | -0.14 | 0.084 | 0.011 | -8.1e-05 | 0.005 | -0.0045 |
| IsSxsPassiveMode | -0.014 | -0.0036 | -0.021 | 0.53 | 1 | 0.12 | 0.014 | 0.0082 | -0.0015 | 0.00066 | 0.0077 | 0.0038 | -0.024 | -0.013 | -0.022 | -0.038 | 0.015 | -0.042 | -0.024 | -0.02 | -0.035 | -0.0028 | -0.034 | 0.028 | -0.0019 | 6.6e-05 | -0.015 | -0.0053 | -0.0074 | -0.002 | -0.0018 | -0.0095 | -0.0057 | 0.0086 | -0.0081 | -0.011 | -0.021 | -0.013 | -0.039 | -0.035 | -0.02 | -7.7e-05 | 0.0097 | -0.0025 | 0.0068 | 0.0029 | 0.019 | -0.024 | 0.013 | -0.0036 | -0.00054 | -0.0073 | -0.0037 | 0.02 | nan | -0.01 | -0.0061 | -0.0013 | -0.016 | 0.031 | 0.0014 | -0.0011 | -0.00084 | -0.0011 | -0.00066 | -0.001 | -0.0083 | -0.0036 | -0.0018 | -0.0079 | -0.0054 | -0.0054 | -0.0071 | -0.0041 | -0.0053 | -0.005 | -0.0055 | 0.00028 | -0.0039 | -0.0017 | -0.0037 | 0.00095 | 0.0043 | 2.1e-05 | -0.0017 | -0.012 | -0.0013 | -0.0016 | -0.02 | -0.003 | -0.0016 | -0.016 | -0.0011 | 0.0033 | -0.002 | -0.0026 | -0.002 | 0.003 | 0.028 | 0.012 | -0.013 | -0.011 | -0.0052 | 0.00029 | 0.022 | -5.3e-05 | -0.2 | 0.044 | 0.017 | -7.5e-05 | 0.018 | 0.0033 |
| AVProductStatesIdentifier | 0.046 | 0.04 | 0.041 | 0.11 | 0.12 | 1 | -0.041 | 0.00087 | 0.006 | 0.00056 | 0.0078 | 0.0082 | 0.026 | -0.0043 | 0.029 | 0.029 | 0.021 | 0.033 | 0.024 | -0.012 | -0.068 | -0.0024 | 0.026 | -0.0034 | -0.021 | 0.009 | -0.017 | -0.0046 | -0.0071 | 0.0095 | 0.0038 | -0.002 | -0.015 | 0.0076 | -0.008 | -0.013 | 0.016 | -0.0043 | 0.0082 | 0.018 | 0.018 | -0.0057 | 0.012 | -0.0074 | 0.023 | 0.017 | 0.033 | -0.021 | -0.0043 | -0.002 | 9.8e-05 | -0.00056 | 0.00015 | 0.023 | nan | -0.0082 | -0.0086 | -0.0079 | -0.018 | -0.0057 | 0.015 | 0.0073 | 0.0077 | 0.0058 | 0.0043 | 0.0042 | -0.0022 | -0.00086 | 0.00048 | -0.0039 | -0.0013 | -0.00058 | -0.00028 | 0.0031 | 0.0061 | 0.00069 | -0.00012 | -0.0032 | 0.0042 | 0.004 | 0.0052 | 0.0029 | 0.0072 | -0.00088 | 0.0024 | -0.00068 | 0.0037 | 0.0055 | 0.00089 | 0.0043 | 0.0043 | -0.0029 | 0.0013 | 0.0056 | 0.0035 | 0.0069 | 9.4e-05 | 0.0037 | -0.02 | 0.013 | 0.014 | 0.0054 | 0.012 | -0.00041 | -0.0073 | 0.0067 | -0.17 | 0.25 | 0.0027 | -6.5e-05 | -0.0043 | -0.011 |
| HasTpm | -0.94 | 0.0042 | -0.081 | 0.0081 | 0.014 | -0.041 | 1 | 0.029 | -0.022 | -0.00041 | 0.023 | 0.0071 | -0.29 | -0.067 | -0.29 | -0.35 | 0.024 | -0.38 | -0.27 | -0.19 | -0.0091 | 0.0023 | -0.36 | 0.026 | -0.0092 | -0.16 | 0.019 | -0.015 | -0.0056 | -0.038 | -0.0059 | -0.045 | -0.029 | 0.0051 | 0.00077 | -0.013 | -0.093 | -0.066 | -0.037 | -0.14 | -0.094 | 0.033 | 0.028 | -0.014 | -0.0046 | -0.013 | -0.0092 | 0.0075 | 0.091 | 0.017 | -0.00044 | -0.01 | -0.023 | 0.086 | nan | 0.0021 | 0.029 | 0.017 | 0.026 | 0.062 | -0.043 | -0.018 | -0.0042 | -0.0035 | -0.01 | -0.044 | 0.0044 | 0.0028 | -0.0019 | 0.002 | 0.0042 | -0.0041 | -0.0059 | -0.015 | -0.1 | 0.001 | 0.0032 | 0.0088 | 0.0035 | -0.013 | -0.0063 | 0.0052 | 0.0068 | 0.00099 | -0.036 | 0.03 | -0.0034 | -0.056 | 0.017 | 0.0021 | -0.037 | 0.026 | 0.0039 | 0.0052 | -0.0096 | -0.08 | 0.0043 | -0.0029 | 0.085 | 0.021 | -0.24 | 0.003 | -0.3 | 0.0026 | 0.03 | 4.4e-05 | -0.026 | 0.011 | 0.012 | 6.3e-05 | 0.025 | 0.039 |
| CountryIdentifier | -0.028 | 0.00037 | -0.026 | -0.00042 | 0.0082 | 0.00087 | 0.029 | 1 | -0.1 | -0.036 | 0.36 | 0.25 | -0.048 | -0.0078 | -0.046 | -0.077 | -0.0073 | -0.08 | -0.041 | -0.0056 | -0.034 | 0.0018 | -0.054 | 0.0062 | 0.013 | -0.013 | -0.028 | -0.008 | -0.028 | -0.013 | -0.021 | -0.032 | -0.028 | 0.025 | -0.0053 | -0.034 | -0.071 | -0.0078 | -0.076 | -0.064 | -0.07 | -0.12 | -0.084 | -0.15 | -0.061 | 0.0068 | -0.0066 | -0.0053 | 0.047 | -0.022 | -0.002 | 0.0075 | -0.035 | 0.019 | nan | -0.0096 | 0.031 | 0.0011 | -0.014 | 0.021 | -0.005 | -0.0029 | -0.0023 | -0.0032 | -0.0023 | -0.0028 | 0.03 | 0.014 | 0.0034 | -0.0031 | 0.0095 | -0.0044 | 0.0021 | 0.0027 | -0.0032 | 0.0031 | 0.0041 | -0.0094 | -0.00078 | 0.0037 | -0.005 | -0.0088 | -0.0099 | 0.0017 | -0.0039 | -0.042 | -0.0078 | -0.0035 | 0.014 | -0.0075 | -0.0036 | -0.034 | -0.0059 | -0.0068 | -0.006 | -0.0044 | -0.006 | -0.023 | 0.12 | -0.062 | -0.03 | -0.023 | -0.028 | -0.0094 | 0.021 | -0.00024 | -0.048 | 0.011 | 0.019 | 5.4e-05 | -0.015 | -0.013 |
| CityIdentifier | 0.023 | -0.0014 | 0.0037 | -0.0021 | -0.0015 | 0.006 | -0.022 | -0.1 | 1 | -0.018 | -0.066 | -0.021 | 0.015 | -0.0021 | 0.012 | 0.019 | 0.01 | 0.018 | 0.014 | -0.0027 | 0.022 | 0.0036 | 0.014 | -0.0098 | 0.0088 | 0.0013 | 0.009 | 0.0062 | 0.012 | 0.022 | 0.031 | 0.034 | 0.033 | -0.0078 | -0.011 | 0.027 | 0.015 | -0.0022 | 0.0074 | 0.01 | 0.015 | -0.069 | -0.026 | 0.031 | 0.044 | 0.08 | 0.027 | -0.028 | -0.028 | 0.006 | 0.0028 | 0.0037 | 0.017 | -0.0076 | nan | 0.0014 | -0.0084 | 2.9e-05 | 0.0074 | -0.0086 | 0.2 | -2.4e-05 | 0.00048 | -8.3e-05 | -0.00034 | 0.00082 | -0.01 | -0.0038 | -0.00095 | -0.0032 | -0.0047 | -4.4e-05 | 0.00041 | 4.5e-05 | 0.0046 | 0.00028 | 0.00019 | -0.0028 | 0.0023 | -0.00016 | 0.0042 | 0.0024 | 0.006 | -0.0004 | 0.0001 | 0.003 | 0.002 | 0.00083 | -0.0091 | 0.0021 | 0.0021 | 0.0015 | 0.0028 | 0.0018 | 0.0015 | 0.0014 | 0.0015 | 0.00097 | -0.018 | 0.015 | 0.012 | 0.0038 | 0.013 | 0.0044 | -0.026 | 0.00046 | 0.0039 | 0.0023 | 0.026 | 0.00033 | 0.015 | -0.02 |
| OrganizationIdentifier | -0.00019 | -0.005 | -0.0082 | 4.3e-05 | 0.00066 | 0.00056 | -0.00041 | -0.036 | -0.018 | 1 | -0.015 | -0.013 | 0.0021 | -0.0081 | -0.0016 | -0.01 | -0.02 | -0.013 | -0.007 | 0.018 | 0.011 | 0.00088 | -0.008 | 0.0016 | -0.0092 | 0.005 | 0.0096 | 0.009 | 0.0031 | 0.011 | -0.019 | 0.00023 | 0.0097 | -0.0079 | 0.0099 | 0.012 | -0.005 | -0.0081 | -0.0083 | -0.01 | -0.0057 | 0.013 | -0.00045 | 0.0016 | -0.0024 | 0.011 | -0.00042 | 0.0076 | 0.011 | -0.0011 | 0.00011 | -0.00078 | 0.013 | -0.0024 | nan | 0.016 | 0.0058 | 0.013 | 0.0037 | -0.0054 | 0.0022 | -6.9e-05 | 3.7e-06 | 0.00026 | 0.00037 | -0.00055 | -0.006 | -0.003 | -0.0015 | -0.0016 | -0.0037 | 0.0001 | -0.002 | -0.00088 | -0.0014 | -0.00097 | -0.0016 | -0.0012 | -0.0018 | -0.0018 | -0.0032 | -0.0012 | -0.0023 | 0.0021 | -2.9e-05 | 0.0017 | -0.0014 | 0.00041 | -0.011 | -0.0027 | 0.0002 | -0.0012 | -0.00053 | -0.00082 | -0.0024 | 0.00038 | -0.0011 | -0.0033 | 0.0097 | -0.0017 | -0.0004 | 0.00047 | 0.0018 | 0.00013 | 0.001 | 0.00023 | 0.012 | -0.0042 | -0.00064 | -0.00015 | 0.023 | 0.015 |
| GeoNameIdentifier | -0.022 | 0.0013 | -0.026 | 0.004 | 0.0077 | 0.0078 | 0.023 | 0.36 | -0.066 | -0.015 | 1 | 0.46 | -0.028 | -0.028 | -0.024 | -0.063 | 0.084 | -0.064 | -0.04 | -0.088 | -0.0038 | 0.00094 | -0.048 | 0.004 | 0.017 | -0.016 | -0.0043 | -0.012 | -0.017 | -0.0075 | 0.00063 | -0.033 | -0.029 | 0.0042 | 0.0018 | -0.026 | -0.039 | -0.028 | -0.041 | -0.057 | -0.036 | -0.034 | 0.012 | -0.034 | 0.26 | 0.11 | 0.028 | -0.062 | 0.05 | -0.033 | -0.0028 | 0.0097 | -0.029 | 0.11 | nan | -0.02 | 0.022 | 0.0058 | -0.004 | 0.0075 | -0.055 | -0.0018 | -0.00098 | -0.0015 | -0.00092 | -0.0017 | -0.023 | -0.0055 | 0.00068 | -0.013 | -0.0087 | -0.0092 | -0.0042 | 0.0018 | -0.0028 | -0.0055 | -0.011 | -0.017 | -6.1e-05 | 0.0022 | -0.0029 | -0.0066 | -0.0045 | -0.0013 | -0.0028 | -0.0033 | -0.0023 | -0.0022 | -0.032 | -0.0017 | -0.0022 | -0.0081 | -0.00028 | -0.00091 | 0.00019 | -0.0028 | -0.0013 | -0.0061 | 0.046 | -0.0019 | -0.019 | -0.012 | -0.019 | -0.0096 | 0.019 | -0.0003 | -0.066 | 0.017 | 0.027 | -0.00045 | -0.003 | 0.0077 |
| LocaleEnglishNameIdentifier | -0.0054 | -0.00021 | -0.015 | 0.00062 | 0.0038 | 0.0082 | 0.0071 | 0.25 | -0.021 | -0.013 | 0.46 | 1 | -0.015 | -0.00075 | -0.012 | -0.039 | 0.057 | -0.036 | -0.019 | -0.059 | 0.00034 | -0.0019 | -0.029 | 0.011 | 0.018 | -0.013 | -0.011 | -0.0079 | -0.0078 | 0.0009 | 0.011 | -0.0069 | -0.0066 | 0.016 | -0.018 | -0.0069 | -0.031 | -0.0007 | -0.038 | -0.037 | -0.027 | -0.077 | -0.027 | -0.043 | 0.28 | 0.31 | 0.046 | -0.053 | 0.00064 | -0.026 | -0.0021 | 0.0078 | -0.016 | 0.044 | nan | -0.018 | -0.0079 | -0.011 | -0.0042 | -0.0092 | -0.096 | -0.0011 | -0.00074 | -0.0017 | -0.00081 | -0.00093 | -0.015 | -0.0029 | 6.5e-05 | -0.0033 | -0.0039 | -0.0042 | -0.0034 | 0.0013 | -9.5e-06 | -0.0026 | -0.0037 | -0.0098 | 0.00085 | 0.0057 | -8.2e-05 | -0.0029 | -0.001 | 0.0013 | -0.0023 | -0.023 | 0.00035 | -0.0017 | -0.016 | 0.0023 | -0.0017 | -0.0041 | 0.0012 | 0.00069 | 0.0024 | -0.00032 | 0.00089 | 0.00097 | 0.036 | -0.0046 | -0.009 | -0.0073 | -0.014 | -0.0097 | 0.022 | -0.00011 | -0.036 | 0.0064 | 0.019 | -0.00042 | -0.016 | -0.02 |
| Platform | 0.29 | 0.00091 | 0.084 | -0.00091 | -0.024 | 0.026 | -0.29 | -0.048 | 0.015 | 0.0021 | -0.028 | -0.015 | 1 | 0.0022 | 0.87 | 0.46 | 0.08 | 0.57 | 0.3 | 0.089 | -0.035 | -0.0037 | 0.17 | 0.033 | -0.033 | 0.054 | 0.052 | 0.36 | 0.003 | 0.02 | 0.011 | 0.083 | -0.0035 | -0.0055 | 0.036 | 0.023 | 0.19 | 0.0035 | 0.051 | 0.23 | 0.19 | 0.1 | 0.14 | 0.011 | 0.0025 | -0.001 | 0.072 | -0.0043 | -0.14 | -0.028 | 0.0013 | 0.013 | 0.019 | 0.024 | nan | 0.061 | -0.0012 | -0.013 | -0.024 | -0.1 | 0.033 | 0.044 | 0.036 | 0.047 | 0.026 | 0.032 | -0.0096 | -0.0044 | -0.00036 | -0.009 | -0.0061 | 0.00063 | -0.0058 | 0.022 | 0.029 | -0.0058 | 0.00016 | -0.014 | 0.01 | 0.014 | -0.0091 | -0.0084 | -0.012 | -0.0016 | 0.062 | -0.056 | 0.0025 | 0.052 | -0.024 | -0.0006 | 0.062 | -0.056 | 0.014 | -0.0059 | -0.0016 | 0.087 | 0.032 | 0.013 | -0.14 | -0.038 | 0.48 | 0.49 | 0.21 | -0.0042 | 0.023 | -7.2e-05 | 0.013 | -0.02 | 0.0079 | 0.0016 | -0.036 | -0.033 |
| Processor | 0.073 | 0.034 | 0.071 | -0.012 | -0.013 | -0.0043 | -0.067 | -0.0078 | -0.0021 | -0.0081 | -0.028 | -0.00075 | 0.0022 | 1 | 0.008 | 0.09 | -0.091 | 0.095 | 0.21 | 0.076 | -0.0098 | -0.0042 | 0.11 | -0.031 | 0.013 | 0.01 | 0.23 | -0.013 | 0.13 | 0.063 | -0.011 | 0.13 | 0.18 | 0.016 | 0.027 | 0.22 | 0.053 | 0.99 | 0.11 | 0.085 | 0.056 | -0.065 | -0.093 | 0.028 | 0.015 | -0.0099 | 0.043 | 0.015 | -0.12 | 0.004 | 0.0013 | 0.078 | 0.093 | -0.23 | nan | -0.006 | 0.076 | -0.023 | 0.3 | -0.043 | -0.0089 | 0.0009 | -0.0012 | -0.00083 | -0.00035 | 0.0057 | 0.0023 | 0.005 | 0.0036 | 0.0095 | 0.011 | 0.01 | 0.0052 | 0.0068 | 0.016 | 0.0075 | 0.0064 | -0.0039 | 0.011 | 0.0054 | 0.013 | 0.0042 | 0.0049 | 0.0048 | 0.0026 | -0.01 | 0.0071 | 0.0037 | 0.038 | 0.013 | 0.0019 | 0.15 | 0.004 | 0.017 | 0.012 | 0.015 | 0.011 | 0.046 | -0.074 | -0.13 | 0.013 | -0.012 | 0.023 | -0.0023 | -0.053 | -0.00013 | 0.05 | -0.021 | -0.19 | -0.00018 | -0.11 | 0.0088 |
| OsVer | 0.3 | 0.0046 | 0.09 | -0.00052 | -0.022 | 0.029 | -0.29 | -0.046 | 0.012 | -0.0016 | -0.024 | -0.012 | 0.87 | 0.008 | 1 | 0.45 | 0.027 | 0.56 | 0.26 | 0.027 | -0.039 | -0.0034 | 0.17 | -0.013 | -0.0065 | 0.052 | -0.017 | -0.0062 | -0.0027 | 0.017 | 0.015 | 0.05 | -0.025 | -0.0059 | -0.0059 | -0.019 | 0.14 | 0.0092 | 0.057 | 0.21 | 0.14 | -0.0095 | 0.026 | 0.019 | 0.0071 | 0.0043 | 0.004 | -0.0069 | -0.15 | -0.025 | 0.0014 | 0.0012 | 0.0066 | 0.034 | nan | -0.011 | 0.0015 | -0.014 | -0.02 | -0.093 | 0.028 | 0.044 | 0.036 | 0.047 | 0.026 | 0.068 | -0.0087 | -0.0039 | -0.0002 | -0.0082 | -0.0055 | 0.0012 | -0.005 | 0.023 | 0.029 | -0.0054 | 0.00073 | -0.013 | 0.011 | 0.015 | -0.0081 | -0.0087 | -0.013 | -0.0015 | 0.062 | -0.052 | 0.0022 | 0.052 | -0.023 | -0.0021 | 0.062 | -0.051 | -0.0071 | -0.0091 | -0.004 | 0.082 | 0.027 | 0.011 | -0.13 | -0.032 | 0.46 | 0.46 | 0.18 | -0.0039 | -0.00044 | -6.7e-05 | 0.0031 | -0.018 | -0.015 | 0.0016 | -0.037 | -0.027 |
| OsBuild | 0.37 | 0.1 | 0.39 | -0.019 | -0.038 | 0.029 | -0.35 | -0.077 | 0.019 | -0.01 | -0.063 | -0.039 | 0.46 | 0.09 | 0.45 | 1 | 0.041 | 0.93 | 0.65 | 0.096 | -0.11 | -0.012 | 0.74 | -0.017 | 0.0011 | 0.058 | 0.032 | 0.036 | 0.023 | 0.004 | 0.017 | 0.035 | 0.016 | -0.0012 | 0.019 | -0.012 | 0.55 | 0.09 | 0.56 | 0.86 | 0.5 | 0.048 | 0.072 | 0.12 | -0.037 | -0.048 | -0.033 | 0.058 | -0.008 | 0.25 | 0.037 | 0.03 | 0.0035 | 0.0057 | nan | 0.01 | 0.026 | -0.024 | 0.057 | -0.13 | 0.054 | 0.023 | 0.016 | 0.02 | 0.014 | 0.044 | 0.12 | 0.053 | 0.027 | 0.087 | 0.06 | 0.061 | 0.08 | 0.053 | 0.084 | 0.055 | 0.062 | 0.077 | 0.064 | 0.066 | 0.097 | 0.053 | 0.069 | 0.0082 | 0.036 | 0.13 | 0.066 | 0.037 | 0.3 | 0.09 | 0.036 | 0.055 | 0.039 | 0.052 | 0.054 | 0.063 | 0.055 | 0.17 | -0.59 | -0.037 | 0.29 | 0.22 | 0.25 | 0.15 | -0.0063 | -0.00027 | -0.0042 | -0.013 | -0.091 | 0.00051 | -0.11 | -0.0029 |
| OsSuite | -0.028 | 0.032 | 0.021 | 0.012 | 0.015 | 0.021 | 0.024 | -0.0073 | 0.01 | -0.02 | 0.084 | 0.057 | 0.08 | -0.091 | 0.027 | 0.041 | 1 | 0.056 | 0.034 | -0.77 | -0.028 | -0.0015 | 0.017 | -0.017 | 0.049 | -0.036 | 0.018 | 0.14 | -0.012 | -0.091 | 0.033 | -0.13 | -0.16 | -0.041 | 0.0097 | -0.16 | 0.075 | -0.09 | 0.063 | 0.041 | 0.085 | -0.14 | 0.5 | 0.17 | 0.055 | 0.033 | 0.13 | -0.21 | 0.16 | -0.057 | -0.0075 | 0.02 | -0.082 | 0.45 | nan | -0.038 | 0.091 | -0.053 | 0.056 | -0.017 | -0.028 | 0.0038 | 0.0045 | 0.0044 | 0.0021 | 0.00042 | -0.055 | -0.013 | 0.0034 | -0.026 | -0.0047 | -0.01 | 0.0042 | 0.0092 | 0.0055 | 0.0004 | -0.017 | -0.0081 | 0.016 | 0.014 | 0.026 | 0.0032 | 0.018 | 0.00067 | 0.00028 | 0.068 | 0.015 | -0.00093 | -0.059 | 0.029 | 0.00028 | 0.01 | 0.022 | 0.018 | 0.019 | 0.0013 | 0.018 | 0.052 | -0.12 | 0.088 | 0.011 | 0.027 | -0.0077 | -0.017 | -0.026 | 0.0003 | -0.17 | 0.064 | -0.053 | 0.00042 | -0.099 | 0.074 |
| OsPlatformSubRelease | 0.4 | 0.11 | 0.41 | -0.021 | -0.042 | 0.033 | -0.38 | -0.08 | 0.018 | -0.013 | -0.064 | -0.036 | 0.57 | 0.095 | 0.56 | 0.93 | 0.056 | 1 | 0.61 | 0.094 | -0.12 | -0.015 | 0.68 | -0.02 | 0.0007 | 0.062 | 0.031 | 0.039 | 0.023 | 0.0021 | 0.018 | 0.035 | 0.0076 | -0.0028 | 0.019 | -0.02 | 0.53 | 0.096 | 0.56 | 0.78 | 0.52 | 0.053 | 0.086 | 0.15 | -0.039 | -0.05 | -0.035 | 0.063 | -0.011 | 0.13 | 0.013 | 0.033 | -0.0011 | 0.02 | nan | 0.0035 | 0.028 | -0.029 | 0.06 | -0.15 | 0.051 | 0.029 | 0.021 | 0.026 | 0.017 | 0.029 | 0.13 | 0.057 | 0.03 | 0.094 | 0.065 | 0.067 | 0.087 | 0.059 | 0.091 | 0.059 | 0.068 | 0.083 | 0.071 | 0.074 | 0.1 | 0.057 | 0.074 | 0.0089 | 0.043 | 0.14 | 0.072 | 0.043 | 0.33 | 0.099 | 0.044 | 0.062 | 0.042 | 0.057 | 0.059 | 0.074 | 0.062 | 0.19 | -0.63 | -0.041 | 0.34 | 0.27 | 0.22 | 0.061 | -0.0055 | -0.0003 | -0.0096 | -0.014 | -0.1 | 0.00072 | -0.13 | -0.0054 |
| OsBuildLab | 0.28 | 0.13 | 0.43 | -0.015 | -0.024 | 0.024 | -0.27 | -0.041 | 0.014 | -0.007 | -0.04 | -0.019 | 0.3 | 0.21 | 0.26 | 0.65 | 0.034 | 0.61 | 1 | 0.12 | -0.11 | -0.0069 | 0.66 | -0.0011 | -0.0066 | 0.054 | 0.099 | 0.13 | 0.044 | 0.015 | 0.0043 | 0.054 | 0.058 | -0.0013 | 0.032 | 0.053 | 0.59 | 0.21 | 0.28 | 0.56 | 0.56 | 0.073 | 0.07 | 0.089 | -0.025 | -0.031 | -0.0056 | 0.022 | -0.018 | 0.13 | 0.023 | 0.037 | 0.027 | -0.03 | nan | 0.026 | 0.043 | -0.012 | 0.1 | -0.091 | 0.041 | 0.068 | 0.044 | 0.065 | 0.043 | 0.067 | 0.04 | 0.15 | 0.1 | 0.067 | 0.12 | 0.078 | 0.076 | 0.1 | 0.04 | 0.031 | 0.066 | -0.0069 | 0.13 | 0.16 | 0.19 | 0.087 | 0.14 | 0.076 | 0.037 | 0.021 | 0.14 | 0.074 | 0.11 | 0.17 | 0.089 | -0.018 | 0.08 | 0.13 | 0.1 | 0.099 | 0.1 | 0.07 | -0.33 | -0.12 | 0.085 | 0.1 | 0.21 | 0.14 | -0.0096 | -0.00016 | 0.0069 | -0.014 | -0.085 | -8.8e-05 | -0.086 | 0.0056 |
| SkuEdition | 0.2 | -0.032 | 0.05 | -0.015 | -0.02 | -0.012 | -0.19 | -0.0056 | -0.0027 | 0.018 | -0.088 | -0.059 | 0.089 | 0.076 | 0.027 | 0.096 | -0.77 | 0.094 | 0.12 | 1 | 0.024 | -0.00048 | 0.11 | 0.061 | -0.063 | 0.066 | 0.028 | 0.17 | 0.028 | 0.079 | -0.025 | 0.14 | 0.16 | 0.034 | 0.022 | 0.15 | 0.04 | 0.076 | -0.022 | 0.063 | 0.034 | 0.44 | -0.09 | -0.15 | -0.057 | -0.039 | -0.083 | 0.19 | -0.081 | 0.08 | 0.0056 | -0.0048 | 0.08 | -0.4 | nan | 0.1 | -0.083 | 0.037 | -0.057 | -0.00042 | 0.044 | -6e-05 | -0.0035 | -0.004 | -0.00025 | 0.01 | 0.095 | 0.012 | 0.00026 | 0.016 | 0.0011 | 0.0076 | -0.0041 | -0.006 | 0.018 | -0.003 | 0.01 | 0.0043 | -0.015 | -0.01 | -0.023 | -0.004 | -0.018 | -0.0013 | 0.0058 | -0.077 | -0.013 | 0.01 | 0.12 | -0.026 | 0.0072 | -0.034 | -0.0074 | -0.013 | -0.013 | 0.022 | -0.0064 | -0.044 | 0.058 | -0.087 | 0.074 | 0.0068 | 0.13 | 0.042 | 0.05 | -0.00026 | 0.16 | -0.057 | 0.056 | -0.00037 | 0.071 | -0.076 |
| IsProtected | 0.0095 | -0.23 | -0.2 | -0.14 | -0.035 | -0.068 | -0.0091 | -0.034 | 0.022 | 0.011 | -0.0038 | 0.00034 | -0.035 | -0.0098 | -0.039 | -0.11 | -0.028 | -0.12 | -0.11 | 0.024 | 1 | 0.0039 | -0.11 | -0.00041 | 0.0055 | 0.0022 | 0.025 | 0.0095 | 0.0085 | 0.017 | -0.0019 | 0.018 | 0.048 | -0.0086 | 0.0027 | 0.042 | -0.11 | -0.0099 | -0.12 | -0.11 | -0.11 | -0.025 | -0.027 | -0.033 | 0.046 | 0.046 | 0.013 | -0.0063 | -0.029 | -0.021 | 0.00058 | -0.01 | 0.021 | -0.038 | nan | 0.012 | -0.0018 | 0.012 | 0.0048 | 0.034 | 0.0076 | -0.03 | -0.025 | -0.032 | -0.011 | -0.004 | -0.036 | -0.015 | -0.0083 | -0.013 | -0.023 | -0.011 | -0.023 | -0.026 | -0.014 | -0.019 | -0.014 | -0.018 | -0.029 | -0.03 | -0.036 | -0.022 | -0.033 | -0.00082 | -0.015 | -0.051 | -0.028 | -0.0077 | -0.046 | -0.039 | -0.013 | -0.058 | -0.013 | -0.019 | -0.014 | -0.011 | -0.015 | -0.0021 | 0.12 | 0.023 | 0.0018 | 0.0005 | 0.02 | 0.0052 | -0.01 | 9.6e-05 | 0.047 | -0.038 | 0.037 | 0.00013 | 0.052 | 0.0043 |
| SMode | -0.0021 | -0.00068 | -0.0093 | -0.0026 | -0.0028 | -0.0024 | 0.0023 | 0.0018 | 0.0036 | 0.00088 | 0.00094 | -0.0019 | -0.0037 | -0.0042 | -0.0034 | -0.012 | -0.0015 | -0.015 | -0.0069 | -0.00048 | 0.0039 | 1 | -0.0099 | -0.0016 | 0.0026 | -0.0014 | 0.0035 | -0.00083 | 1.5e-05 | -0.0009 | -0.0043 | 0.0051 | 0.015 | -0.0055 | 0.015 | -0.0038 | -0.0077 | -0.0041 | -0.016 | -0.013 | -0.008 | -0.0084 | -0.0061 | 0.012 | -0.0023 | 0.0005 | -0.008 | 0.0016 | 0.0063 | 0.0084 | -0.0002 | 0.007 | 0.00037 | 0.017 | nan | -0.0013 | 0.016 | 0.024 | 0.015 | -0.0019 | 0.01 | -0.00018 | -0.00013 | -0.00017 | -0.0001 | -0.00016 | -0.0013 | -0.00056 | -0.00028 | -0.0012 | -0.00085 | -0.00084 | -0.0011 | -0.00064 | -0.00083 | -0.00078 | -0.00086 | -0.0017 | -0.00095 | -0.001 | -0.0016 | -0.0011 | -0.0016 | -0.00019 | -0.00026 | -0.0066 | -0.0012 | -0.00025 | -0.0034 | -0.0017 | -0.00027 | -0.0091 | -0.00089 | -0.0012 | -0.0011 | -0.00052 | -0.001 | -0.0036 | 0.023 | -0.0084 | -0.0019 | -0.0017 | -0.0016 | 0.00051 | -0.0049 | -8.3e-06 | 0.013 | -0.0026 | 0.0015 | -1.2e-05 | 0.0028 | 0.019 |
| IeVerIdentifier | 0.38 | 0.11 | 0.47 | -0.022 | -0.034 | 0.026 | -0.36 | -0.054 | 0.014 | -0.008 | -0.048 | -0.029 | 0.17 | 0.11 | 0.17 | 0.74 | 0.017 | 0.68 | 0.66 | 0.11 | -0.11 | -0.0099 | 1 | -0.022 | 0.0051 | 0.068 | 0.035 | 0.023 | 0.024 | 0.0062 | 0.0076 | 0.024 | 0.035 | 0.0011 | 0.016 | 0.0026 | 0.52 | 0.11 | 0.57 | 0.66 | 0.47 | 0.039 | 0.04 | 0.11 | -0.038 | -0.041 | -0.021 | 0.05 | 0.016 | 0.2 | 0.029 | 0.029 | 0.0064 | -0.023 | nan | 0.011 | 0.022 | -0.018 | 0.061 | -0.099 | 0.043 | 0.099 | 0.064 | 0.061 | 0.044 | 0.075 | 0.072 | 0.064 | 0.037 | 0.13 | 0.077 | 0.057 | 0.071 | 0.056 | 0.095 | 0.039 | 0.095 | -0.037 | 0.039 | 0.043 | 0.16 | 0.0021 | -0.034 | 0.077 | 0.054 | 0.17 | 0.096 | 0.077 | 0.2 | 0.12 | 0.11 | 0.14 | 0.072 | 0.097 | 0.034 | 0.054 | 0.033 | 0.095 | -0.47 | -0.093 | 0.1 | 0.018 | 0.16 | 0.16 | -0.015 | -0.00022 | 0.0015 | -0.011 | -0.085 | 4.4e-06 | -0.094 | 0.0034 |
| SmartScreen | -0.028 | -0.021 | -0.016 | 0.02 | 0.028 | -0.0034 | 0.026 | 0.0062 | -0.0098 | 0.0016 | 0.004 | 0.011 | 0.033 | -0.031 | -0.013 | -0.017 | -0.017 | -0.02 | -0.0011 | 0.061 | -0.00041 | -0.0016 | -0.022 | 1 | -0.041 | 0.0046 | 0.0071 | 0.12 | -0.00057 | -0.0056 | -0.0042 | 0.0077 | -2.2e-06 | 0.032 | 0.0037 | 0.0068 | -0.001 | -0.031 | -0.027 | -0.01 | -0.00086 | 0.073 | 0.039 | -0.061 | 0.0043 | -0.0049 | 0.039 | -0.0098 | 0.019 | 0.013 | 0.0029 | -0.0057 | 0.0017 | -0.026 | nan | 0.022 | -0.014 | -0.0078 | -0.026 | 0.016 | -0.029 | -0.0013 | -0.00069 | -0.00091 | -0.00042 | -0.0012 | 0.0028 | 0.0022 | -0.00023 | 0.00043 | 0.00016 | -0.0015 | -0.0016 | -0.003 | -0.0061 | -0.0022 | -0.00063 | 0.0014 | -0.0049 | -0.0053 | -0.0088 | -0.0019 | -0.0056 | 0.00077 | -0.00075 | -0.025 | -0.0043 | -0.0021 | 0.0077 | -0.0069 | -0.00039 | -0.014 | -0.0023 | -0.0021 | -0.0021 | 0.0015 | 0.0014 | -0.0067 | 0.018 | 0.013 | 0.0032 | 0.012 | 0.0028 | 0.0012 | 0.8 | -0.00014 | 0.027 | -0.017 | 0.033 | 0.0002 | 0.0052 | -0.017 |
| Firewall | 0.0096 | 0.0044 | -0.0013 | -0.0091 | -0.0019 | -0.021 | -0.0092 | 0.013 | 0.0088 | -0.0092 | 0.017 | 0.018 | -0.033 | 0.013 | -0.0065 | 0.0011 | 0.049 | 0.0007 | -0.0066 | -0.063 | 0.0055 | 0.0026 | 0.0051 | -0.041 | 1 | -0.037 | -0.026 | -0.075 | -0.0067 | -0.011 | 0.016 | -0.015 | -0.0086 | 0.00098 | -0.0094 | -0.046 | -0.0041 | 0.013 | 0.0097 | 0.00042 | -0.0023 | -0.048 | -0.0043 | 0.024 | 0.03 | 0.028 | -0.00047 | -0.021 | -0.015 | -0.0023 | -5.2e-05 | 0.0061 | -0.015 | 0.031 | nan | -0.043 | 0.016 | 0.0098 | 0.019 | 0.0057 | 0.0088 | -0.00084 | -0.00048 | -0.00061 | -0.0006 | 0.00017 | -0.0034 | -0.00043 | 0.00034 | -0.0033 | 0.00049 | -0.0015 | -0.00046 | -0.0011 | 0.0017 | 0.00018 | -0.00045 | -0.0032 | 0.0019 | 0.00081 | 0.0025 | 0.00022 | 0.0031 | -0.00027 | -0.0025 | 0.0046 | 0.0014 | -0.00025 | -0.0047 | 0.0037 | -0.0021 | 0.01 | -0.0023 | 0.0021 | 0.0026 | -0.0012 | 0.00053 | 0.0055 | -0.014 | 0.011 | -0.0077 | -0.0098 | -0.008 | -0.00058 | -0.03 | 5.9e-05 | 0.0031 | 0.0064 | -0.014 | 8.1e-05 | -0.007 | 0.03 |
| UacLuaenable | 0.17 | -0.0049 | 0.015 | 0.0044 | 6.6e-05 | 0.009 | -0.16 | -0.013 | 0.0013 | 0.005 | -0.016 | -0.013 | 0.054 | 0.01 | 0.052 | 0.058 | -0.036 | 0.062 | 0.054 | 0.066 | 0.0022 | -0.0014 | 0.068 | 0.0046 | -0.037 | 1 | 0.0041 | 0.0085 | 0.01 | 0.014 | -0.0048 | 0.015 | 0.015 | -0.0016 | -0.0042 | 0.021 | 0.015 | 0.011 | -0.0024 | 0.02 | 0.015 | 0.013 | -0.013 | -0.0088 | -0.00065 | 0.0017 | -0.0018 | 0.0067 | -0.015 | -0.0011 | 0.0005 | -0.001 | 0.019 | -0.035 | nan | 0.0049 | -0.011 | -0.0057 | -0.0097 | -0.0077 | 0.0089 | 0.0046 | 0.00079 | -0.00012 | 0.0041 | 0.0086 | -0.0021 | -0.0013 | 0.00016 | -0.0011 | -0.00044 | 0.0023 | -5.3e-05 | 0.0017 | 0.022 | -0.00093 | -0.0021 | -0.0031 | -0.0025 | 0.0021 | -0.00013 | -0.0018 | -0.0033 | 0.0033 | 0.011 | -0.0085 | -0.00015 | 0.015 | -0.008 | -0.0028 | 0.0094 | -0.011 | -0.0017 | -0.0022 | 0.00076 | 0.016 | -0.0019 | -0.0045 | -0.0045 | -0.0072 | 0.043 | 0.0017 | 0.066 | 0.0028 | -0.004 | -3.1e-05 | 0.0082 | -0.0033 | 0.0034 | -4.3e-05 | 0.0054 | -0.017 |
| Census_MDC2FormFactor | -0.021 | -0.013 | -0.0026 | -0.015 | -0.015 | -0.017 | 0.019 | -0.028 | 0.009 | 0.0096 | -0.0043 | -0.011 | 0.052 | 0.23 | -0.017 | 0.032 | 0.018 | 0.031 | 0.099 | 0.028 | 0.025 | 0.0035 | 0.035 | 0.0071 | -0.026 | 0.0041 | 1 | 0.19 | 0.17 | 0.056 | -0.053 | 0.081 | 0.24 | -0.057 | 0.27 | 0.65 | 0.059 | 0.23 | 0.032 | 0.043 | 0.058 | 0.011 | 0.022 | 0.055 | -0.0012 | -0.0076 | 0.058 | -0.02 | 0.082 | 0.0051 | 0.0012 | 0.037 | 0.099 | -0.019 | nan | 0.049 | 0.51 | 0.33 | 0.66 | -0.018 | 0.034 | -0.0012 | -0.0016 | -0.00093 | -0.00014 | -0.0018 | -0.014 | 0.00016 | 0.0041 | 0.0024 | 0.0052 | 0.0042 | 0.0084 | 0.0068 | 0.0031 | -0.00043 | 0.0032 | -0.00081 | 0.0057 | -0.0026 | 0.011 | 0.0048 | -0.0024 | 0.013 | -0.0015 | 0.0059 | 0.0015 | -0.002 | 0.00011 | 0.0098 | -0.002 | 0.034 | 0.014 | 0.008 | 0.0096 | 0.002 | 0.014 | 0.041 | -0.012 | -0.065 | 0.0048 | 0.0083 | 0.015 | 0.0014 | -0.022 | -0.00022 | 0.087 | -0.03 | -0.11 | -0.00031 | 0.2 | 0.28 |
| Census_DeviceFamily | -0.0038 | -0.011 | -0.013 | -0.00012 | -0.0053 | -0.0046 | -0.015 | -0.008 | 0.0062 | 0.009 | -0.012 | -0.0079 | 0.36 | -0.013 | -0.0062 | 0.036 | 0.14 | 0.039 | 0.13 | 0.17 | 0.0095 | -0.00083 | 0.023 | 0.12 | -0.075 | 0.0085 | 0.19 | 1 | 0.019 | 0.01 | -0.0099 | 0.09 | 0.059 | 0.0029 | 0.11 | 0.12 | 0.13 | -0.013 | -0.016 | 0.047 | 0.13 | 0.31 | 0.32 | -0.021 | -0.011 | -0.013 | 0.18 | 0.0079 | 0.023 | -0.0087 | -0.00039 | 0.036 | 0.034 | -0.032 | nan | 0.2 | -0.0099 | 0.0015 | -0.0098 | -0.024 | 0.014 | -0.00034 | -0.00025 | -0.00032 | -0.0002 | -0.00032 | -0.0025 | -0.0011 | -0.00054 | -0.0024 | -0.0016 | -0.0016 | -0.0021 | -0.00099 | -0.0016 | -0.0015 | -0.0017 | -0.002 | -0.0018 | -0.0017 | -0.003 | 0.00087 | -0.00011 | -0.00036 | -0.0005 | -0.012 | 0.00062 | -0.00048 | -0.0029 | 0.0039 | -0.00052 | -0.016 | 0.057 | 0.0096 | 0.0068 | 0.014 | 0.015 | 0.0049 | -0.018 | -0.016 | 0.049 | 0.073 | 0.077 | -0.00094 | 0.066 | -1.6e-05 | 0.026 | -0.0054 | 0.062 | -2.2e-05 | 0.0013 | -0.019 |
| Census_OEMNameIdentifier | 0.0063 | -0.0016 | 0.0021 | -0.0074 | -0.0074 | -0.0071 | -0.0056 | -0.028 | 0.012 | 0.0031 | -0.017 | -0.0078 | 0.003 | 0.13 | -0.0027 | 0.023 | -0.012 | 0.023 | 0.044 | 0.028 | 0.0085 | 1.5e-05 | 0.024 | -0.00057 | -0.0067 | 0.01 | 0.17 | 0.019 | 1 | 0.15 | -0.031 | 0.041 | 0.098 | -0.007 | 0.027 | 0.14 | 0.025 | 0.13 | 0.023 | 0.025 | 0.024 | -0.0067 | -0.012 | 0.025 | 0.025 | 0.021 | 0.011 | 0.013 | -0.0041 | 0.0042 | 0.0003 | 0.15 | 0.21 | -0.12 | nan | 0.025 | 0.055 | 0.0051 | 0.17 | 0.0024 | 0.011 | -0.00084 | -0.0011 | -0.0011 | -0.0005 | 0.00028 | -0.00044 | 0.00064 | 0.0026 | 0.0028 | 0.0024 | 0.0071 | 0.0025 | 0.002 | 0.0033 | 0.0015 | 0.0014 | 0.0034 | 0.0011 | -0.0023 | 0.0019 | 0.0024 | 0.00079 | 0.013 | 0.00019 | 0.01 | 0.0014 | -0.00064 | 0.0049 | 0.0019 | -0.00015 | 0.017 | 0.0015 | 0.0012 | 0.0018 | 0.0022 | 0.0024 | 0.016 | -0.017 | -0.024 | 0.0047 | 0.00049 | 0.013 | 0.00092 | -0.015 | -5.7e-05 | 0.056 | -0.017 | -0.051 | -9.5e-05 | 0.015 | 0.04 |
| Census_OEMModelIdentifier | 0.037 | -0.00088 | -0.0011 | -0.0028 | -0.002 | 0.0095 | -0.038 | -0.013 | 0.022 | 0.011 | -0.0075 | 0.0009 | 0.02 | 0.063 | 0.017 | 0.004 | -0.091 | 0.0021 | 0.015 | 0.079 | 0.017 | -0.0009 | 0.0062 | -0.0056 | -0.011 | 0.014 | 0.056 | 0.01 | 0.15 | 1 | -0.015 | 0.1 | 0.066 | 0.007 | -0.002 | 0.052 | -0.0015 | 0.063 | -0.015 | -0.0084 | -0.0026 | -0.021 | -0.061 | -0.014 | 0.05 | 0.045 | 0.011 | -0.0002 | -0.075 | 0.0068 | 0.0017 | 0.0086 | 0.29 | -0.098 | nan | -0.028 | 0.034 | -0.0077 | -0.014 | -0.04 | 0.078 | -0.00064 | -0.00087 | 0.00018 | 5.6e-05 | 0.0022 | -0.0032 | -0.002 | -0.00047 | 5.9e-05 | -0.0033 | 0.002 | -0.0022 | -0.00084 | 0.0052 | -0.00039 | 0.00091 | -0.0041 | -0.00084 | -0.0038 | -0.0035 | -0.0002 | 0.0014 | 0.0041 | 0.001 | -0.015 | -0.0011 | 0.0015 | -0.0096 | -0.0047 | 0.0017 | 0.0068 | -0.0018 | -0.0015 | -0.0036 | 0.0041 | -0.0029 | -0.0092 | 0.016 | -0.016 | 0.015 | 0.0046 | 0.021 | 0.0028 | -0.017 | -5.4e-05 | 0.043 | -0.017 | 0.0036 | -0.00014 | 0.025 | -0.014 |
| Census_ProcessorManufacturerIdentifier | 0.0082 | 0.011 | 0.011 | 0.00034 | -0.0018 | 0.0038 | -0.0059 | -0.021 | 0.031 | -0.019 | 0.00063 | 0.011 | 0.011 | -0.011 | 0.015 | 0.017 | 0.033 | 0.018 | 0.0043 | -0.025 | -0.0019 | -0.0043 | 0.0076 | -0.0042 | 0.016 | -0.0048 | -0.053 | -0.0099 | -0.031 | -0.015 | 1 | 0.37 | 0.026 | 0.00025 | -0.08 | -0.029 | 0.0065 | -0.012 | 0.0086 | 0.013 | 0.0061 | -0.038 | -0.00047 | 0.014 | 0.037 | -0.0028 | -0.0032 | 0.0015 | -0.061 | -0.0047 | 0.00036 | 0.00033 | -0.03 | -0.021 | nan | -0.02 | -0.075 | -0.058 | -0.088 | 0.023 | -0.017 | 0.00025 | 0.00063 | 0.00011 | 9e-05 | -1.2e-05 | -0.00043 | 0.00017 | -0.0012 | 0.0024 | -0.0024 | 0.0021 | 0.00073 | 0.0017 | 0.0049 | 0.0016 | 0.0062 | 0.0018 | 0.0052 | 0.0057 | 0.0004 | 0.0029 | 0.0093 | -0.0017 | 5.3e-05 | 0.0028 | 0.0028 | 0.001 | -0.0027 | 0.0013 | 0.00087 | -0.0052 | 0.0006 | -0.00015 | -0.0027 | 0.001 | -0.0019 | -0.0014 | -0.028 | 0.033 | 0.0055 | 0.0055 | 0.0048 | 0.0017 | -0.0076 | -0.00015 | -0.023 | 0.012 | 0.14 | 0.00067 | -0.057 | -0.14 |
| Census_ProcessorModelIdentifier | 0.046 | 0.0063 | 0.013 | -0.0062 | -0.0095 | -0.002 | -0.045 | -0.032 | 0.034 | 0.00023 | -0.033 | -0.0069 | 0.083 | 0.13 | 0.05 | 0.035 | -0.13 | 0.035 | 0.054 | 0.14 | 0.018 | 0.0051 | 0.024 | 0.0077 | -0.015 | 0.015 | 0.081 | 0.09 | 0.041 | 0.1 | 0.37 | 1 | 0.17 | 0.028 | 0.023 | 0.17 | 0.015 | 0.13 | -0.014 | 0.016 | 0.012 | -0.0058 | -0.068 | -0.035 | 0.035 | 0.018 | -0.0054 | 0.056 | -0.2 | 0.016 | 0.004 | 0.023 | 0.11 | -0.2 | nan | 0.084 | -0.029 | 0.0079 | -0.033 | -0.012 | 0.032 | 0.0025 | 0.0014 | 0.0004 | 0.0013 | 0.0035 | 0.0044 | 0.0017 | -0.0013 | 0.0087 | -0.0017 | 0.0084 | -0.00086 | 0.00093 | 0.0078 | 0.0027 | 0.0088 | -0.00025 | 0.0035 | -0.0012 | -0.003 | 0.0026 | 0.0025 | -0.0015 | 0.0037 | -0.036 | 0.0012 | 0.0046 | 0.0053 | -0.003 | 0.0044 | 0.0059 | 0.0033 | -0.0016 | -0.0053 | 0.0081 | -0.0017 | -0.014 | 0.013 | -0.027 | 0.036 | 0.029 | 0.036 | 0.007 | -0.014 | -0.00023 | 0.071 | -0.026 | 0.1 | 0.00034 | 0.04 | -0.1 |
| Census_PrimaryDiskTypeName | 0.03 | -0.018 | -0.0018 | -0.012 | -0.0057 | -0.015 | -0.029 | -0.028 | 0.033 | 0.0097 | -0.029 | -0.0066 | -0.0035 | 0.18 | -0.025 | 0.016 | -0.16 | 0.0076 | 0.058 | 0.16 | 0.048 | 0.015 | 0.035 | -2.2e-06 | -0.0086 | 0.015 | 0.24 | 0.059 | 0.098 | 0.066 | 0.026 | 0.17 | 1 | -0.0061 | 0.065 | 0.33 | 0.016 | 0.19 | 3.3e-05 | 0.017 | 0.011 | -0.035 | -0.095 | -0.0016 | 0.055 | 0.074 | -0.016 | 0.074 | -0.035 | 0.031 | 0.0047 | 0.01 | 0.11 | -0.19 | nan | 0.15 | 0.075 | 0.1 | 0.19 | 0.01 | 0.073 | -0.0018 | -0.0027 | -0.0022 | -0.0003 | 0.00022 | 0.002 | 0.00076 | 0.0013 | 0.023 | 0.007 | 0.015 | 0.013 | 0.0021 | 0.0089 | 0.0065 | 0.014 | -0.0036 | 0.0073 | -0.00069 | 0.005 | -0.0016 | -0.01 | 0.0041 | -0.00099 | -0.018 | -0.0033 | -0.0014 | 0.015 | -0.00022 | -0.00099 | 0.015 | 6.8e-05 | 0.0002 | 0.0034 | 0.0036 | 0.0039 | 0.0046 | 0.037 | -0.08 | -0.0022 | -0.015 | 0.024 | 0.01 | -0.033 | -0.00025 | 0.11 | -0.039 | 0.042 | -0.00036 | 0.18 | 0.11 |
| Census_HasOpticalDiskDrive | -0.0049 | 0.0026 | 0.0038 | 0.0029 | 0.0086 | 0.0076 | 0.0051 | 0.025 | -0.0078 | -0.0079 | 0.0042 | 0.016 | -0.0055 | 0.016 | -0.0059 | -0.0012 | -0.041 | -0.0028 | -0.0013 | 0.034 | -0.0086 | -0.0055 | 0.0011 | 0.032 | 0.00098 | -0.0016 | -0.057 | 0.0029 | -0.007 | 0.007 | 0.00025 | 0.028 | -0.0061 | 1 | -0.027 | -0.016 | -0.0087 | 0.016 | 0.0016 | -0.00025 | -0.0059 | -0.0022 | -0.026 | -0.087 | 0.012 | 0.0096 | 0.065 | -0.03 | -0.074 | -0.0012 | 0.0037 | -0.014 | 0.005 | -0.06 | nan | -0.019 | -0.057 | -0.038 | -0.057 | 0.044 | -0.0045 | 0.00015 | -0.00097 | -0.00084 | -0.0006 | -0.00098 | 0.0082 | 0.003 | 0.0014 | -0.012 | -0.007 | -0.0072 | -0.0094 | -0.0041 | -0.0036 | -0.0041 | -0.00085 | -0.014 | 0.0015 | -0.0026 | 0.00093 | -0.0032 | -0.0023 | -0.0016 | -0.00081 | -0.021 | 0.0029 | -0.00081 | 0.014 | 0.0035 | -0.00072 | 0.0073 | -1.9e-05 | 0.0016 | 0.00075 | 4.3e-05 | 0.00013 | -0.0047 | -1.6e-06 | 0.015 | -0.0014 | -0.0016 | -0.0044 | 0.0026 | 0.037 | -0.00012 | -0.027 | 0.0043 | 0.015 | 0.0009 | -0.013 | -0.055 |
| Census_ChassisTypeName | -0.006 | 0.0069 | 0.0083 | -0.0059 | -0.0081 | -0.008 | 0.00077 | -0.0053 | -0.011 | 0.0099 | 0.0018 | -0.018 | 0.036 | 0.027 | -0.0059 | 0.019 | 0.0097 | 0.019 | 0.032 | 0.022 | 0.0027 | 0.015 | 0.016 | 0.0037 | -0.0094 | -0.0042 | 0.27 | 0.11 | 0.027 | -0.002 | -0.08 | 0.023 | 0.065 | -0.027 | 1 | 0.15 | 0.041 | 0.027 | 0.03 | 0.024 | 0.041 | 0.051 | 0.038 | 0.027 | -0.029 | -0.034 | 0.023 | 0.047 | 0.051 | 0.0072 | 0.00067 | 0.037 | 0.055 | 0.054 | nan | 0.18 | 0.27 | 0.26 | 0.18 | -0.068 | 0.00087 | -0.00021 | -0.0016 | -0.00032 | 0.00021 | -0.0003 | -0.00078 | -0.00074 | -0.0015 | -0.0019 | -0.0046 | -0.00058 | -0.0023 | 0.0038 | -0.0023 | 0.00015 | 0.0002 | 5.3e-06 | 0.00044 | -0.0024 | 0.002 | 0.0025 | 0.0029 | 0.0095 | -0.00075 | 0.021 | 0.0014 | -0.0012 | -0.0016 | 0.0039 | -0.0012 | 0.022 | 0.0087 | 0.004 | 0.0039 | 0.0015 | 0.0047 | 0.009 | -0.037 | -0.00032 | 0.0053 | 0.0063 | 0.0071 | -0.00096 | -0.0021 | -0.00012 | 0.036 | -0.0098 | -0.036 | -0.00017 | 0.088 | 0.22 |
| Census_PowerPlatformRoleName | 0.014 | -0.022 | -0.022 | -0.013 | -0.011 | -0.013 | -0.013 | -0.034 | 0.027 | 0.012 | -0.026 | -0.0069 | 0.023 | 0.22 | -0.019 | -0.012 | -0.16 | -0.02 | 0.053 | 0.15 | 0.042 | -0.0038 | 0.0026 | 0.0068 | -0.046 | 0.021 | 0.65 | 0.12 | 0.14 | 0.052 | -0.029 | 0.17 | 0.33 | -0.016 | 0.15 | 1 | -0.0034 | 0.22 | -0.032 | -0.012 | -0.0067 | -0.0069 | -0.092 | -0.0047 | 0.021 | 0.025 | 0.026 | 0.055 | -0.032 | 0.0097 | 0.0022 | 0.00028 | 0.083 | -0.2 | nan | 0.037 | 0.17 | 0.17 | 0.46 | 0.026 | 0.049 | -0.0015 | -0.0024 | -0.0023 | -0.00083 | -0.00053 | -0.0069 | -0.0019 | -0.00039 | 0.0021 | 0.00097 | 0.0026 | 0.0015 | 0.0022 | 0.0015 | -0.0054 | 0.0039 | -0.0025 | -0.0032 | -0.009 | -0.00047 | 0.00032 | -0.0088 | 0.0098 | -0.0012 | -0.024 | -0.0057 | -0.0013 | -0.008 | -0.0043 | -0.0018 | 0.0028 | 0.002 | -0.00038 | -0.0003 | 0.0012 | 0.0031 | 0.012 | 0.069 | -0.086 | 0.0029 | -0.0025 | 0.025 | 0.0062 | -0.028 | -0.00022 | 0.11 | -0.039 | -0.034 | -0.00031 | 0.2 | 0.011 |
| Census_OSVersion | 0.092 | 0.14 | 0.37 | -0.0098 | -0.021 | 0.016 | -0.093 | -0.071 | 0.015 | -0.005 | -0.039 | -0.031 | 0.19 | 0.053 | 0.14 | 0.55 | 0.075 | 0.53 | 0.59 | 0.04 | -0.11 | -0.0077 | 0.52 | -0.001 | -0.0041 | 0.015 | 0.059 | 0.13 | 0.025 | -0.0015 | 0.0065 | 0.015 | 0.016 | -0.0087 | 0.041 | -0.0034 | 1 | 0.054 | 0.48 | 0.57 | 0.97 | 0.082 | 0.094 | 0.14 | -0.03 | -0.039 | 0.037 | 0.022 | 0.036 | 0.14 | 0.024 | 0.037 | 0.0024 | 0.053 | nan | 0.021 | 0.042 | -0.00057 | 0.066 | -0.12 | 0.036 | 0.0062 | 0.0041 | 0.006 | 0.0038 | 0.0059 | 0.023 | 0.12 | 0.071 | 0.045 | 0.095 | 0.047 | -0.0046 | 0.033 | 0.026 | 0.02 | 0.061 | -0.0034 | 0.05 | 0.1 | 0.16 | 0.066 | 0.1 | 0.043 | 0.011 | 0.25 | 0.13 | 0.01 | 0.082 | 0.13 | 0.013 | 0.075 | 0.08 | 0.092 | 0.046 | 0.034 | 0.07 | 0.043 | -0.5 | -0.0051 | 0.12 | 0.092 | 0.19 | 0.11 | -0.0057 | -0.00029 | -0.02 | -0.0023 | -0.07 | 0.00022 | -0.083 | 0.026 |
| Census_OSArchitecture | 0.072 | 0.034 | 0.071 | -0.012 | -0.013 | -0.0043 | -0.066 | -0.0078 | -0.0022 | -0.0081 | -0.028 | -0.0007 | 0.0035 | 0.99 | 0.0092 | 0.09 | -0.09 | 0.096 | 0.21 | 0.076 | -0.0099 | -0.0041 | 0.11 | -0.031 | 0.013 | 0.011 | 0.23 | -0.013 | 0.13 | 0.063 | -0.012 | 0.13 | 0.19 | 0.016 | 0.027 | 0.22 | 0.054 | 1 | 0.11 | 0.085 | 0.056 | -0.065 | -0.094 | 0.028 | 0.015 | -0.01 | 0.044 | 0.015 | -0.12 | 0.0037 | 0.0014 | 0.078 | 0.094 | -0.23 | nan | -0.006 | 0.076 | -0.023 | 0.3 | -0.043 | -0.0086 | 0.0009 | -0.0012 | -0.00082 | -0.00035 | 0.0051 | 0.0022 | 0.0049 | 0.0036 | 0.0095 | 0.011 | 0.01 | 0.0052 | 0.0069 | 0.016 | 0.0075 | 0.0065 | -0.0039 | 0.011 | 0.0055 | 0.013 | 0.0043 | 0.005 | 0.0048 | 0.0023 | -0.01 | 0.0071 | 0.004 | 0.038 | 0.013 | 0.0022 | 0.15 | 0.004 | 0.017 | 0.012 | 0.014 | 0.011 | 0.046 | -0.074 | -0.13 | 0.014 | -0.01 | 0.023 | -0.0024 | -0.052 | -0.00013 | 0.05 | -0.021 | -0.19 | -0.00018 | -0.11 | 0.0089 |
| Census_OSBranch | 0.037 | 0.11 | 0.32 | -0.02 | -0.039 | 0.0082 | -0.037 | -0.076 | 0.0074 | -0.0083 | -0.041 | -0.038 | 0.051 | 0.11 | 0.057 | 0.56 | 0.063 | 0.56 | 0.28 | -0.022 | -0.12 | -0.016 | 0.57 | -0.027 | 0.0097 | -0.0024 | 0.032 | -0.016 | 0.023 | -0.015 | 0.0086 | -0.014 | 3.3e-05 | 0.0016 | 0.03 | -0.032 | 0.48 | 0.11 | 1 | 0.61 | 0.46 | 0.028 | 0.055 | 0.18 | -0.035 | -0.054 | 0.05 | 0.053 | 0.067 | 0.088 | 0.016 | 0.036 | -0.012 | 0.054 | nan | -0.0057 | 0.041 | -0.011 | 0.082 | -0.1 | 0.024 | 0.0034 | 0.0017 | 0.0021 | 0.0017 | 0.0024 | 0.11 | 0.053 | 0.026 | 0.03 | 0.02 | 0.022 | 0.027 | 0.013 | 0.021 | 0.017 | 0.025 | -0.036 | 0.014 | 0.00061 | 0.088 | -0.015 | -0.032 | -0.0038 | 0.0032 | 0.49 | 0.018 | 0.0036 | 0.2 | 0.037 | 0.0049 | 0.43 | -0.018 | -0.024 | -0.022 | 0.0057 | -0.015 | -0.067 | -0.72 | -0.00087 | 0.047 | 0.03 | 0.085 | 0.067 | -0.012 | -0.00033 | -0.026 | 0.0022 | -0.11 | -0.00012 | -0.11 | 0.035 |
| Census_OSBuildNumber | 0.15 | 0.11 | 0.39 | -0.02 | -0.035 | 0.018 | -0.14 | -0.064 | 0.01 | -0.01 | -0.057 | -0.037 | 0.23 | 0.085 | 0.21 | 0.86 | 0.041 | 0.78 | 0.56 | 0.063 | -0.11 | -0.013 | 0.66 | -0.01 | 0.00042 | 0.02 | 0.043 | 0.047 | 0.025 | -0.0084 | 0.013 | 0.016 | 0.017 | -0.00025 | 0.024 | -0.012 | 0.57 | 0.085 | 0.61 | 1 | 0.51 | 0.065 | 0.08 | 0.14 | -0.047 | -0.058 | -0.043 | 0.074 | 0.052 | 0.26 | 0.054 | 0.032 | -0.0039 | 0.017 | nan | 0.013 | 0.034 | -0.02 | 0.073 | -0.11 | 0.039 | 0.009 | 0.007 | 0.0078 | 0.0055 | 0.011 | 0.14 | 0.059 | 0.029 | 0.096 | 0.068 | 0.065 | 0.087 | 0.05 | 0.066 | 0.062 | 0.069 | 0.086 | 0.068 | 0.067 | 0.11 | 0.06 | 0.079 | 0.0095 | 0.018 | 0.16 | 0.07 | 0.017 | 0.35 | 0.1 | 0.017 | 0.075 | 0.045 | 0.061 | 0.059 | 0.035 | 0.055 | 0.19 | -0.58 | -0.032 | 0.15 | 0.11 | 0.15 | 0.14 | 0.00073 | -0.00028 | -0.012 | -0.0071 | -0.097 | 0.00027 | -0.11 | 0.011 |
| Census_OSBuildRevision | 0.094 | 0.14 | 0.36 | -0.0094 | -0.02 | 0.018 | -0.094 | -0.07 | 0.015 | -0.0057 | -0.036 | -0.027 | 0.19 | 0.056 | 0.14 | 0.5 | 0.085 | 0.52 | 0.56 | 0.034 | -0.11 | -0.008 | 0.47 | -0.00086 | -0.0023 | 0.015 | 0.058 | 0.13 | 0.024 | -0.0026 | 0.0061 | 0.012 | 0.011 | -0.0059 | 0.041 | -0.0067 | 0.97 | 0.056 | 0.46 | 0.51 | 1 | 0.083 | 0.1 | 0.15 | -0.022 | -0.036 | 0.056 | 0.0051 | 0.041 | 0.095 | 0.014 | 0.039 | 0.00019 | 0.064 | nan | 0.018 | 0.045 | 0.0002 | 0.069 | -0.12 | 0.036 | 0.0062 | 0.0042 | 0.006 | 0.0039 | 0.0055 | 0.026 | 0.12 | 0.065 | -0.034 | 0.099 | 0.047 | -0.005 | 0.034 | 0.028 | 0.022 | 0.062 | -0.0038 | 0.053 | 0.1 | 0.17 | 0.065 | 0.1 | 0.0068 | 0.011 | 0.23 | 0.14 | 0.01 | 0.092 | 0.14 | 0.013 | 0.089 | 0.083 | 0.095 | 0.047 | 0.034 | 0.072 | 0.049 | -0.5 | 0.0042 | 0.12 | 0.09 | 0.18 | 0.024 | -0.0054 | -0.0003 | -0.028 | -1.2e-06 | -0.073 | 0.00024 | -0.085 | 0.029 |
| Census_OSEdition | -0.042 | -0.0052 | 0.05 | 0.0034 | -7.7e-05 | -0.0057 | 0.033 | -0.12 | -0.069 | 0.013 | -0.034 | -0.077 | 0.1 | -0.065 | -0.0095 | 0.048 | -0.14 | 0.053 | 0.073 | 0.44 | -0.025 | -0.0084 | 0.039 | 0.073 | -0.048 | 0.013 | 0.011 | 0.31 | -0.0067 | -0.021 | -0.038 | -0.0058 | -0.035 | -0.0022 | 0.051 | -0.0069 | 0.082 | -0.065 | 0.028 | 0.065 | 0.083 | 1 | 0.67 | -0.0052 | -0.063 | -0.14 | -0.048 | 0.055 | 0.11 | 0.041 | 4.9e-05 | -0.0059 | -0.0088 | 0.033 | nan | 0.088 | -0.065 | -0.0014 | -0.064 | 0.027 | -0.15 | 0.0011 | 0.0015 | 0.0024 | 0.0011 | -0.0014 | 0.051 | 0.0045 | 0.003 | -0.0057 | 0.00026 | -0.003 | 0.002 | -0.00077 | -0.0081 | -0.0059 | -0.0047 | 0.0035 | -0.0073 | -0.0025 | -0.0073 | -0.0021 | -0.0056 | -0.0024 | 2e-05 | -0.011 | -0.0038 | -0.0016 | 0.07 | -0.0078 | 0.00089 | -0.019 | 0.012 | -3.7e-05 | -0.0039 | 0.0044 | 0.0049 | -0.0016 | -0.038 | 0.0082 | 0.03 | 0.038 | 0.062 | 0.029 | 0.072 | -0.00033 | 0.011 | 0.001 | -0.0097 | -0.00046 | -0.026 | -0.02 |
| Census_OSSkuName | -0.037 | 0.014 | 0.039 | 0.01 | 0.0097 | 0.012 | 0.028 | -0.084 | -0.026 | -0.00045 | 0.012 | -0.027 | 0.14 | -0.093 | 0.026 | 0.072 | 0.5 | 0.086 | 0.07 | -0.09 | -0.027 | -0.0061 | 0.04 | 0.039 | -0.0043 | -0.013 | 0.022 | 0.32 | -0.012 | -0.061 | -0.00047 | -0.068 | -0.095 | -0.026 | 0.038 | -0.092 | 0.094 | -0.094 | 0.055 | 0.08 | 0.1 | 0.67 | 1 | 0.078 | -0.0015 | -0.06 | 0.043 | -0.073 | 0.17 | 0.0056 | -0.0044 | 0.0023 | -0.045 | 0.26 | nan | 0.056 | 0.0058 | -0.036 | -0.0094 | 0.014 | -0.072 | 0.0033 | 0.0044 | 0.0047 | 0.0024 | 0.0003 | 0.023 | -0.003 | 0.0045 | -0.019 | -0.0044 | -0.008 | 0.0031 | 0.0042 | -0.0018 | -0.0043 | -0.014 | -0.0052 | 0.0056 | 0.006 | 0.01 | 0.00075 | 0.0065 | -0.0012 | 0.0017 | 0.026 | 0.0063 | -0.00024 | 0.048 | 0.011 | 0.002 | -0.02 | 0.022 | 0.0094 | 0.0074 | 0.005 | 0.013 | 0.028 | -0.09 | 0.047 | 0.031 | 0.049 | 0.031 | 0.0015 | 0.029 | -7.5e-06 | -0.084 | 0.036 | -0.031 | -1e-05 | -0.057 | 0.036 |
| Census_OSInstallTypeName | 0.011 | 0.037 | 0.067 | 0.0006 | -0.0025 | -0.0074 | -0.014 | -0.15 | 0.031 | 0.0016 | -0.034 | -0.043 | 0.011 | 0.028 | 0.019 | 0.12 | 0.17 | 0.15 | 0.089 | -0.15 | -0.033 | 0.012 | 0.11 | -0.061 | 0.024 | -0.0088 | 0.055 | -0.021 | 0.025 | -0.014 | 0.014 | -0.035 | -0.0016 | -0.087 | 0.027 | -0.0047 | 0.14 | 0.028 | 0.18 | 0.14 | 0.15 | -0.0052 | 0.078 | 1 | 0.019 | -0.0028 | -0.018 | -0.048 | 0.084 | -0.049 | -0.0058 | 0.025 | -0.0013 | 0.16 | nan | -0.036 | 0.049 | 0.012 | 0.098 | 0.015 | 0.00089 | 0.0012 | 0.00074 | 0.0014 | 0.00088 | 0.0016 | -0.023 | 0.00026 | 0.0085 | 0.0017 | 0.013 | 0.0042 | 0.024 | 0.016 | 0.013 | 0.0098 | -0.0034 | 0.019 | 0.022 | 0.026 | 0.04 | 0.01 | 0.023 | 0.005 | 0.0014 | 0.15 | 0.02 | 0.0016 | 0.0024 | 0.035 | 0.0012 | 0.03 | 0.015 | 0.023 | 0.025 | 0.0037 | 0.021 | 0.081 | -0.18 | 0.0092 | 0.003 | 0.0047 | 0.0093 | -0.021 | -0.094 | 0.00067 | 0.013 | 0.00018 | -0.053 | -0.0003 | -0.03 | 0.072 |
| Census_OSInstallLanguageIdentifier | 0.0048 | -0.0041 | -0.023 | 0.0045 | 0.0068 | 0.023 | -0.0046 | -0.061 | 0.044 | -0.0024 | 0.26 | 0.28 | 0.0025 | 0.015 | 0.0071 | -0.037 | 0.055 | -0.039 | -0.025 | -0.057 | 0.046 | -0.0023 | -0.038 | 0.0043 | 0.03 | -0.00065 | -0.0012 | -0.011 | 0.025 | 0.05 | 0.037 | 0.035 | 0.055 | 0.012 | -0.029 | 0.021 | -0.03 | 0.015 | -0.035 | -0.047 | -0.022 | -0.063 | -0.0015 | 0.019 | 1 | 0.67 | 0.044 | -0.058 | -0.062 | -0.013 | -0.0011 | -0.002 | 0.052 | -0.00047 | nan | -0.011 | -0.054 | -0.019 | -0.0057 | -0.0069 | -0.037 | -0.00045 | 0.00032 | -4.3e-05 | 0.00059 | -0.00026 | -0.031 | -0.0087 | -0.0035 | -0.048 | -0.025 | -0.0032 | -0.006 | -0.0046 | -0.0018 | -0.0087 | -0.016 | -0.011 | -0.0027 | -0.0056 | -0.0042 | -0.002 | 0.00041 | -0.00068 | -0.00071 | -0.0098 | -0.00098 | 0.00032 | -0.037 | -0.0042 | -1.4e-05 | -0.0016 | 0.00064 | 0.00084 | 0.00059 | 0.0013 | 0.00045 | -0.00085 | 0.021 | 0.023 | 0.0017 | 0.0036 | 0.0036 | -0.00025 | 0.0013 | -0.00017 | -0.022 | 0.0084 | 0.053 | -0.00049 | 0.028 | -0.018 |
| Census_OSUILocaleIdentifier | 0.014 | -0.013 | -0.034 | 0.00037 | 0.0029 | 0.017 | -0.013 | 0.0068 | 0.08 | 0.011 | 0.11 | 0.31 | -0.001 | -0.0099 | 0.0043 | -0.048 | 0.033 | -0.05 | -0.031 | -0.039 | 0.046 | 0.0005 | -0.041 | -0.0049 | 0.028 | 0.0017 | -0.0076 | -0.013 | 0.021 | 0.045 | -0.0028 | 0.018 | 0.074 | 0.0096 | -0.034 | 0.025 | -0.039 | -0.01 | -0.054 | -0.058 | -0.036 | -0.14 | -0.06 | -0.0028 | 0.67 | 1 | 0.036 | -0.044 | -0.038 | -0.014 | -0.00078 | 0.014 | 0.051 | -0.015 | nan | -0.011 | -0.05 | -0.014 | 0.0041 | -0.013 | 0.056 | -0.00075 | 0.0002 | -0.0016 | -8.2e-05 | -0.00022 | -0.029 | -0.01 | -0.0044 | -0.0085 | -0.014 | -0.0041 | -0.0067 | -0.0053 | -0.0012 | -0.0078 | -0.015 | -0.014 | -0.0032 | -0.0044 | -0.0059 | -0.0041 | -0.0031 | -0.00041 | -0.00093 | -0.021 | -0.003 | 0.0002 | -0.041 | -0.0058 | -0.00051 | -0.0097 | -0.0021 | -0.0028 | -0.0016 | 0.0013 | -0.0011 | -0.012 | 0.052 | 0.0009 | 0.0026 | -0.0015 | 0.0024 | -0.0017 | -0.014 | 0.0003 | 0.0091 | 0.0016 | 0.038 | -0.00042 | 0.041 | -0.016 |
| Census_OSWUAutoUpdateOptionsName | 0.008 | -0.0029 | -0.037 | 0.0088 | 0.019 | 0.033 | -0.0092 | -0.0066 | 0.027 | -0.00042 | 0.028 | 0.046 | 0.072 | 0.043 | 0.004 | -0.033 | 0.13 | -0.035 | -0.0056 | -0.083 | 0.013 | -0.008 | -0.021 | 0.039 | -0.00047 | -0.0018 | 0.058 | 0.18 | 0.011 | 0.011 | -0.0032 | -0.0054 | -0.016 | 0.065 | 0.023 | 0.026 | 0.037 | 0.044 | 0.05 | -0.043 | 0.056 | -0.048 | 0.043 | -0.018 | 0.044 | 0.036 | 1 | -0.23 | -0.036 | -0.11 | 0.0025 | 0.025 | 0.02 | 0.1 | nan | 0.00051 | 0.046 | 0.037 | 0.058 | 0.0011 | 0.06 | 0.0011 | 0.0008 | 0.00091 | 0.00072 | 0.00016 | -0.076 | -0.032 | -0.014 | -0.064 | -0.044 | -0.034 | -0.052 | -0.022 | -0.019 | -0.028 | -0.031 | -0.075 | -0.015 | -0.019 | -0.036 | -0.026 | -0.027 | 0.0014 | -0.00026 | 0.14 | 0.021 | 0.00026 | -0.16 | 0.041 | 6.1e-05 | 0.011 | 0.027 | 0.02 | 0.021 | 0.0049 | 0.016 | 0.019 | -0.074 | 0.1 | 0.0095 | 0.014 | 0.00057 | -0.0071 | 0.022 | -2.1e-05 | -0.13 | 0.044 | 0.01 | -3.3e-05 | 0.0083 | 0.034 |
| Census_GenuineStateName | -0.012 | 0.0075 | 0.054 | -0.0086 | -0.024 | -0.021 | 0.0075 | -0.0053 | -0.028 | 0.0076 | -0.062 | -0.053 | -0.0043 | 0.015 | -0.0069 | 0.058 | -0.21 | 0.063 | 0.022 | 0.19 | -0.0063 | 0.0016 | 0.05 | -0.0098 | -0.021 | 0.0067 | -0.02 | 0.0079 | 0.013 | -0.0002 | 0.0015 | 0.056 | 0.074 | -0.03 | 0.047 | 0.055 | 0.022 | 0.015 | 0.053 | 0.074 | 0.0051 | 0.055 | -0.073 | -0.048 | -0.058 | -0.044 | -0.23 | 1 | -0.11 | 0.13 | -0.0017 | -0.0031 | 0.0082 | -0.2 | nan | 0.15 | -0.073 | -0.034 | -0.055 | -0.043 | -0.059 | -0.00043 | -0.0013 | -0.00078 | -0.00087 | 0.00083 | 0.077 | 0.016 | 0.00089 | 0.069 | 0.028 | 0.027 | 0.02 | -0.0031 | -0.0039 | 0.001 | 0.014 | 0.06 | -0.0065 | -0.0051 | -0.0086 | 0.0076 | -0.011 | 0.0051 | 0.00029 | -0.0051 | -0.0092 | -0.0011 | 0.11 | -0.016 | 0.00018 | 0.01 | -0.0061 | -0.011 | -0.011 | -0.0014 | -0.0073 | -0.015 | -0.013 | -0.065 | -0.0052 | -0.00094 | -0.0034 | 0.00096 | -0.013 | -0.00014 | 0.12 | -0.034 | -0.0025 | -0.00019 | -0.031 | -0.066 |
| Census_ActivationChannel | -0.093 | -0.0094 | 0.02 | 0.0077 | 0.013 | -0.0043 | 0.091 | 0.047 | -0.028 | 0.011 | 0.05 | 0.00064 | -0.14 | -0.12 | -0.15 | -0.008 | 0.16 | -0.011 | -0.018 | -0.081 | -0.029 | 0.0063 | 0.016 | 0.019 | -0.015 | -0.015 | 0.082 | 0.023 | -0.0041 | -0.075 | -0.061 | -0.2 | -0.035 | -0.074 | 0.051 | -0.032 | 0.036 | -0.12 | 0.067 | 0.052 | 0.041 | 0.11 | 0.17 | 0.084 | -0.062 | -0.038 | -0.036 | -0.11 | 1 | 0.037 | -0.0073 | 0.0043 | -0.046 | 0.34 | nan | -0.027 | 0.087 | 0.069 | 0.15 | 0.037 | -0.029 | -0.0077 | -0.0057 | -0.0073 | -0.0045 | -0.0071 | 0.06 | 0.031 | 0.012 | -0.0097 | 0.0071 | -0.0065 | 0.013 | 0.0052 | -0.009 | 0.0044 | 0.00087 | 0.0032 | -0.0027 | 0.0068 | 0.0066 | -0.00025 | -0.003 | 0.0042 | -0.011 | 0.068 | -0.0046 | -0.011 | 0.13 | -0.0015 | -0.011 | -0.037 | -0.0021 | 0.00029 | 0.005 | -0.014 | 0.0039 | 0.023 | -0.024 | -0.014 | -0.081 | -0.068 | -0.041 | -0.0012 | 0.049 | -0.00036 | -0.12 | 0.058 | -0.016 | -0.00051 | 0.08 | 0.2 |
| Census_FlightRing | -0.019 | 0.019 | 0.075 | 0.00013 | -0.0036 | -0.002 | 0.017 | -0.022 | 0.006 | -0.0011 | -0.033 | -0.026 | -0.028 | 0.004 | -0.025 | 0.25 | -0.057 | 0.13 | 0.13 | 0.08 | -0.021 | 0.0084 | 0.2 | 0.013 | -0.0023 | -0.0011 | 0.0051 | -0.0087 | 0.0042 | 0.0068 | -0.0047 | 0.016 | 0.031 | -0.0012 | 0.0072 | 0.0097 | 0.14 | 0.0037 | 0.088 | 0.26 | 0.095 | 0.041 | 0.0056 | -0.049 | -0.013 | -0.014 | -0.11 | 0.13 | 0.037 | 1 | 0.068 | -0.00049 | 0.011 | -0.043 | nan | 0.026 | -0.0008 | 0.0053 | 0.0024 | -0.033 | 0.0072 | -0.0016 | -0.0013 | -0.001 | -0.00057 | -0.0011 | 0.12 | 0.05 | 0.022 | -0.0094 | -0.0065 | -0.0049 | -0.0078 | -0.0058 | -0.0044 | -0.0068 | -0.0046 | -0.0093 | -0.0038 | -0.0042 | -0.012 | -0.00073 | -0.013 | 0.0017 | -0.0022 | -0.048 | -0.0056 | -0.0021 | 0.25 | -0.016 | -0.0013 | -0.051 | -0.0083 | -0.01 | -0.01 | -0.0028 | -0.0076 | -0.031 | 1.6e-05 | -0.054 | 0.025 | -0.00051 | 0.086 | 0.17 | 0.015 | -9.1e-05 | 0.036 | -0.012 | 0.00046 | -0.00013 | 0.01 | 0.0064 |
| Census_ThresholdOptIn | 0.0006 | 0.0011 | 0.0024 | -0.00053 | -0.00054 | 9.8e-05 | -0.00044 | -0.002 | 0.0028 | 0.00011 | -0.0028 | -0.0021 | 0.0013 | 0.0013 | 0.0014 | 0.037 | -0.0075 | 0.013 | 0.023 | 0.0056 | 0.00058 | -0.0002 | 0.029 | 0.0029 | -5.2e-05 | 0.0005 | 0.0012 | -0.00039 | 0.0003 | 0.0017 | 0.00036 | 0.004 | 0.0047 | 0.0037 | 0.00067 | 0.0022 | 0.024 | 0.0014 | 0.016 | 0.054 | 0.014 | 4.9e-05 | -0.0044 | -0.0058 | -0.0011 | -0.00078 | 0.0025 | -0.0017 | -0.0073 | 0.068 | 1 | -0.00014 | 0.0012 | -0.0057 | nan | 0.0047 | 0.0012 | 0.0019 | -0.00026 | 4.3e-06 | 0.0049 | -8.3e-05 | -6.1e-05 | -7.8e-05 | -4.8e-05 | -7.7e-05 | -8.2e-05 | -0.00026 | -0.00013 | -0.00058 | 3.9e-06 | -0.00039 | -0.00052 | 0.00023 | -0.00039 | 6.8e-05 | 0.0004 | -3.6e-05 | 0.00028 | -0.00016 | -7.6e-05 | -0.00021 | 0.00014 | -8.8e-05 | -0.00012 | -0.0012 | -0.00027 | -0.00012 | -0.00044 | 2.1e-06 | -0.00013 | -0.0018 | -0.00042 | -0.00027 | -0.00054 | -0.00025 | 0.0002 | -0.001 | -0.0043 | -0.0014 | 0.013 | 0.0029 | 0.017 | 0.021 | 0.0029 | -3.9e-06 | 0.003 | -0.00083 | 0.003 | -5.5e-06 | 0.0028 | 0.00015 |
| Census_FirmwareManufacturerIdentifier | 0.012 | 0.011 | 0.015 | -0.0056 | -0.0073 | -0.00056 | -0.01 | 0.0075 | 0.0037 | -0.00078 | 0.0097 | 0.0078 | 0.013 | 0.078 | 0.0012 | 0.03 | 0.02 | 0.033 | 0.037 | -0.0048 | -0.01 | 0.007 | 0.029 | -0.0057 | 0.0061 | -0.001 | 0.037 | 0.036 | 0.15 | 0.0086 | 0.00033 | 0.023 | 0.01 | -0.014 | 0.037 | 0.00028 | 0.037 | 0.078 | 0.036 | 0.032 | 0.039 | -0.0059 | 0.0023 | 0.025 | -0.002 | 0.014 | 0.025 | -0.0031 | 0.0043 | -0.00049 | -0.00014 | 1 | 0.065 | -0.0012 | nan | 0.045 | 0.053 | 0.049 | 0.078 | -0.029 | 0.0077 | -0.0015 | -0.0024 | -0.0023 | -0.00028 | -0.00042 | 0.0027 | 0.00036 | 0.0016 | -0.00088 | 0.0054 | 0.0048 | -0.0013 | 0.0026 | 0.0057 | 0.0033 | 0.0011 | -0.003 | 0.0067 | 0.01 | 0.00013 | 0.0025 | 0.004 | -0.00063 | -0.00063 | 0.0088 | 0.0042 | -3e-05 | 0.01 | 0.0073 | -8.2e-05 | 0.028 | 0.0038 | 0.0051 | 0.0039 | 0.0027 | 0.0036 | 0.01 | -0.043 | 0.0025 | 0.0042 | -0.00016 | 0.0047 | -0.0027 | -0.0063 | -0.00017 | -0.002 | -0.00044 | -0.027 | 0.00014 | -0.016 | 0.052 |
| Census_FirmwareVersionIdentifier | 0.024 | -0.007 | -0.0075 | -0.0049 | -0.0037 | 0.00015 | -0.023 | -0.035 | 0.017 | 0.013 | -0.029 | -0.016 | 0.019 | 0.093 | 0.0066 | 0.0035 | -0.082 | -0.0011 | 0.027 | 0.08 | 0.021 | 0.00037 | 0.0064 | 0.0017 | -0.015 | 0.019 | 0.099 | 0.034 | 0.21 | 0.29 | -0.03 | 0.11 | 0.11 | 0.005 | 0.055 | 0.083 | 0.0024 | 0.094 | -0.012 | -0.0039 | 0.00019 | -0.0088 | -0.045 | -0.0013 | 0.052 | 0.051 | 0.02 | 0.0082 | -0.046 | 0.011 | 0.0012 | 0.065 | 1 | -0.12 | nan | -0.016 | 0.044 | 0.011 | 0.06 | -0.0091 | 0.054 | -7.8e-05 | 0.00071 | 0.00011 | 8.8e-05 | 0.0012 | -0.0038 | -0.0014 | -0.00033 | 0.0013 | -0.00058 | 0.0041 | -0.00073 | -0.0012 | 0.0021 | -0.00093 | 0.00047 | -0.0033 | -0.00018 | -0.0073 | -0.00068 | -7.2e-05 | -0.00044 | 0.0047 | 0.00045 | -0.011 | -0.0021 | 0.001 | -0.01 | -0.0047 | 0.0012 | 0.0066 | -0.0015 | -0.0022 | -0.0019 | 0.0035 | -0.0017 | -0.0024 | 0.022 | -0.029 | 0.011 | 0.0032 | 0.019 | 0.0049 | -0.017 | 7.7e-05 | 0.068 | -0.022 | -0.0047 | -0.00024 | 0.062 | 0.058 |
| Census_IsSecureBootEnabled | -0.093 | 0.02 | 0.0085 | 0.017 | 0.02 | 0.023 | 0.086 | 0.019 | -0.0076 | -0.0024 | 0.11 | 0.044 | 0.024 | -0.23 | 0.034 | 0.0057 | 0.45 | 0.02 | -0.03 | -0.4 | -0.038 | 0.017 | -0.023 | -0.026 | 0.031 | -0.035 | -0.019 | -0.032 | -0.12 | -0.098 | -0.021 | -0.2 | -0.19 | -0.06 | 0.054 | -0.2 | 0.053 | -0.23 | 0.054 | 0.017 | 0.064 | 0.033 | 0.26 | 0.16 | -0.00047 | -0.015 | 0.1 | -0.2 | 0.34 | -0.043 | -0.0057 | -0.0012 | -0.12 | 1 | nan | -0.072 | 0.16 | 0.12 | 0.067 | -0.025 | -0.029 | 0.0035 | 0.0049 | 0.0051 | 0.0019 | -0.003 | -0.044 | -0.0097 | 0.0014 | -0.029 | -0.0063 | -0.016 | 0.00059 | 0.0033 | -0.01 | -0.0019 | -0.016 | -0.017 | 0.0093 | 0.013 | 0.015 | -0.0021 | 0.011 | -0.0042 | 0.00086 | 0.07 | 0.0094 | -0.0024 | -0.051 | 0.02 | 0.00018 | -0.0014 | 0.0081 | 0.0099 | 0.012 | -0.004 | 0.012 | 0.032 | -0.1 | 0.099 | 0.00058 | 0.032 | -0.033 | -0.011 | -0.0076 | 0.00041 | -0.21 | 0.08 | -0.0091 | 9.3e-06 | 0.033 | 0.22 |
| Census_IsWIMBootEnabled | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan | nan |
| Census_IsVirtualDevice | -0.0044 | -0.0025 | -0.0026 | -0.0079 | -0.01 | -0.0082 | 0.0021 | -0.0096 | 0.0014 | 0.016 | -0.02 | -0.018 | 0.061 | -0.006 | -0.011 | 0.01 | -0.038 | 0.0035 | 0.026 | 0.1 | 0.012 | -0.0013 | 0.011 | 0.022 | -0.043 | 0.0049 | 0.049 | 0.2 | 0.025 | -0.028 | -0.02 | 0.084 | 0.15 | -0.019 | 0.18 | 0.037 | 0.021 | -0.006 | -0.0057 | 0.013 | 0.018 | 0.088 | 0.056 | -0.036 | -0.011 | -0.011 | 0.00051 | 0.15 | -0.027 | 0.026 | 0.0047 | 0.045 | -0.016 | -0.072 | nan | 1 | -0.024 | 0.037 | -0.021 | -0.044 | 0.023 | -0.00026 | -0.00053 | -0.00068 | -0.00042 | -0.00017 | 0.005 | -0.00055 | -0.00085 | 0.0064 | -0.0022 | 0.0043 | 0.001 | -0.0021 | -0.0021 | -0.0015 | 0.0011 | 0.0047 | -0.0025 | -0.0025 | -0.0036 | -0.00057 | -0.0028 | -0.00055 | -0.00029 | -0.0078 | -0.0017 | 9.8e-05 | 0.01 | -0.0028 | -0.00065 | -0.0044 | 0.013 | -0.00083 | -0.0014 | 0.0016 | 0.0015 | -0.0028 | 0.01 | -0.018 | 0.0062 | 0.0096 | 0.017 | 0.0077 | -0.0019 | -3.4e-05 | 0.05 | -0.011 | 0.08 | -5e-05 | 0.04 | -0.0036 |
| Census_IsTouchEnabled | -0.03 | 0.0015 | 0.0054 | -0.0087 | -0.0061 | -0.0086 | 0.029 | 0.031 | -0.0084 | 0.0058 | 0.022 | -0.0079 | -0.0012 | 0.076 | 0.0015 | 0.026 | 0.091 | 0.028 | 0.043 | -0.083 | -0.0018 | 0.016 | 0.022 | -0.014 | 0.016 | -0.011 | 0.51 | -0.0099 | 0.055 | 0.034 | -0.075 | -0.029 | 0.075 | -0.057 | 0.27 | 0.17 | 0.042 | 0.076 | 0.041 | 0.034 | 0.045 | -0.065 | 0.0058 | 0.049 | -0.054 | -0.05 | 0.046 | -0.073 | 0.087 | -0.0008 | 0.0012 | 0.053 | 0.044 | 0.16 | nan | -0.024 | 1 | 0.46 | 0.45 | -0.05 | 0.054 | 0.00053 | 0.00059 | 0.00098 | 0.00054 | -0.0013 | -0.016 | -0.0004 | 0.0024 | -0.0069 | 0.0027 | -0.00085 | -5e-06 | 0.0066 | -0.0011 | 0.0017 | -0.0016 | -0.0057 | 0.0075 | 0.0048 | 0.01 | 0.0028 | 0.0019 | 0.0069 | -0.0011 | 0.014 | 0.0059 | -0.0011 | -0.003 | 0.013 | -0.00043 | 0.035 | 0.0061 | 0.0079 | 0.0087 | -0.00079 | 0.011 | 0.027 | -0.042 | -0.0082 | 0.00049 | 0.005 | -0.0097 | -0.00089 | -0.014 | -0.00015 | 0.011 | -0.0087 | -0.07 | -0.00022 | 0.18 | 0.45 |
| Census_IsPenCapable | -0.017 | -0.012 | -0.022 | -0.0054 | -0.0013 | -0.0079 | 0.017 | 0.0011 | 2.9e-05 | 0.013 | 0.0058 | -0.011 | -0.013 | -0.023 | -0.014 | -0.024 | -0.053 | -0.029 | -0.012 | 0.037 | 0.012 | 0.024 | -0.018 | -0.0078 | 0.0098 | -0.0057 | 0.33 | 0.0015 | 0.0051 | -0.0077 | -0.058 | 0.0079 | 0.1 | -0.038 | 0.26 | 0.17 | -0.00057 | -0.023 | -0.011 | -0.02 | 0.0002 | -0.0014 | -0.036 | 0.012 | -0.019 | -0.014 | 0.037 | -0.034 | 0.069 | 0.0053 | 0.0019 | 0.049 | 0.011 | 0.12 | nan | 0.037 | 0.46 | 1 | 0.38 | -0.019 | 0.058 | -0.0007 | -0.00086 | -0.00025 | 0.00035 | -0.0014 | -0.011 | -0.002 | -0.0018 | -0.0086 | -0.0019 | -0.0038 | -0.0065 | -0.0035 | -0.004 | -0.0026 | -0.0041 | -0.0081 | -0.0031 | -0.0032 | -0.0036 | -0.0023 | -0.0033 | -0.00068 | -0.001 | 0.0025 | -0.002 | -0.0014 | -0.018 | -0.0015 | -0.0013 | 0.0046 | 0.0011 | 1.7e-05 | -0.00078 | -0.0018 | 0.00084 | -0.0033 | 0.017 | -0.0037 | -0.0061 | -0.0055 | -0.0066 | 0.0017 | -0.012 | -8e-05 | 0.029 | -0.0092 | -0.012 | -0.00012 | 0.3 | 0.44 |
| Census_IsAlwaysOnAlwaysConnectedCapable | -0.025 | 0.0016 | 0.015 | -0.018 | -0.016 | -0.018 | 0.026 | -0.014 | 0.0074 | 0.0037 | -0.004 | -0.0042 | -0.024 | 0.3 | -0.02 | 0.057 | 0.056 | 0.06 | 0.1 | -0.057 | 0.0048 | 0.015 | 0.061 | -0.026 | 0.019 | -0.0097 | 0.66 | -0.0098 | 0.17 | -0.014 | -0.088 | -0.033 | 0.19 | -0.057 | 0.18 | 0.46 | 0.066 | 0.3 | 0.082 | 0.073 | 0.069 | -0.064 | -0.0094 | 0.098 | -0.0057 | 0.0041 | 0.058 | -0.055 | 0.15 | 0.0024 | -0.00026 | 0.078 | 0.06 | 0.067 | nan | -0.021 | 0.45 | 0.38 | 1 | -0.031 | 0.051 | -0.0011 | -0.0011 | -0.00095 | -0.00067 | -0.0017 | -0.014 | 0.0018 | 0.0042 | -0.0028 | 0.0078 | 0.0018 | 0.0091 | 0.011 | 0.0046 | 0.0003 | -0.00087 | 0.00015 | 0.0081 | 0.0029 | 0.022 | 0.0073 | 0.0032 | 0.017 | -0.0017 | 0.028 | 0.0072 | -0.002 | 0.012 | 0.021 | -0.0017 | 0.062 | 0.0085 | 0.015 | 0.018 | 0.00063 | 0.019 | 0.067 | -0.064 | -0.05 | -0.01 | -0.0096 | -0.013 | -0.0016 | -0.041 | -9.8e-05 | 0.07 | -0.021 | -0.2 | -0.00015 | 0.16 | 0.47 |
| Wdft_IsGamer | -0.062 | -0.084 | -0.12 | 0.01 | 0.031 | -0.0057 | 0.062 | 0.021 | -0.0086 | -0.0054 | 0.0075 | -0.0092 | -0.1 | -0.043 | -0.093 | -0.13 | -0.017 | -0.15 | -0.091 | -0.00042 | 0.034 | -0.0019 | -0.099 | 0.016 | 0.0057 | -0.0077 | -0.018 | -0.024 | 0.0024 | -0.04 | 0.023 | -0.012 | 0.01 | 0.044 | -0.068 | 0.026 | -0.12 | -0.043 | -0.1 | -0.11 | -0.12 | 0.027 | 0.014 | 0.015 | -0.0069 | -0.013 | 0.0011 | -0.043 | 0.037 | -0.033 | 4.3e-06 | -0.029 | -0.0091 | -0.025 | nan | -0.044 | -0.05 | -0.019 | -0.031 | 1 | 0.0017 | -0.0042 | -0.0031 | -0.0041 | -0.0025 | -0.0044 | 0.00077 | 0.00046 | -0.00022 | -0.006 | -0.0039 | -0.0076 | -0.0085 | -0.007 | -0.013 | -0.0084 | -0.011 | -0.019 | -0.012 | -0.012 | -0.019 | -0.013 | -0.02 | -0.00097 | -0.0075 | -0.027 | -0.017 | -0.0071 | -0.014 | -0.023 | -0.0074 | -0.081 | -0.015 | -0.015 | -0.015 | -0.013 | -0.016 | -0.023 | 0.14 | 0.0034 | -0.054 | -0.046 | -0.016 | 0.011 | -0.011 | -0.00025 | -0.018 | 0.022 | 0.067 | -0.00035 | 0.08 | -0.035 |
| Wdft_RegionIdentifier | 0.047 | 0.001 | 0.0067 | -0.007 | 0.0014 | 0.015 | -0.043 | -0.005 | 0.2 | 0.0022 | -0.055 | -0.096 | 0.033 | -0.0089 | 0.028 | 0.054 | -0.028 | 0.051 | 0.041 | 0.044 | 0.0076 | 0.01 | 0.043 | -0.029 | 0.0088 | 0.0089 | 0.034 | 0.014 | 0.011 | 0.078 | -0.017 | 0.032 | 0.073 | -0.0045 | 0.00087 | 0.049 | 0.036 | -0.0086 | 0.024 | 0.039 | 0.036 | -0.15 | -0.072 | 0.00089 | -0.037 | 0.056 | 0.06 | -0.059 | -0.029 | 0.0072 | 0.0049 | 0.0077 | 0.054 | -0.029 | nan | 0.023 | 0.054 | 0.058 | 0.051 | 0.0017 | 1 | 0.00056 | 0.0011 | -0.00095 | 0.00028 | 0.0012 | 0.0047 | 0.0015 | 0.00035 | -0.0013 | 0.0019 | 0.0012 | 0.00016 | 0.0038 | 0.0077 | 0.0037 | 0.0017 | -0.0047 | 0.0066 | 0.0032 | 0.013 | 0.0024 | 0.011 | 0.00069 | 0.0016 | -0.0039 | 0.0059 | 0.0017 | 0.0036 | 0.0073 | 0.0012 | 0.0037 | 0.0039 | 0.0042 | 0.0021 | 0.0043 | 0.004 | -0.0015 | -0.035 | 0.015 | 0.023 | 0.0077 | 0.03 | 0.01 | -0.073 | -0.00045 | 0.024 | -0.0076 | 0.034 | 0.00033 | 0.1 | 0.046 |
| Interaction_MYYear_14Month_3 | 0.017 | 0.0043 | 0.048 | -0.00092 | -0.0011 | 0.0073 | -0.018 | -0.0029 | -2.4e-05 | -6.9e-05 | -0.0018 | -0.0011 | 0.044 | 0.0009 | 0.044 | 0.023 | 0.0038 | 0.029 | 0.068 | -6e-05 | -0.03 | -0.00018 | 0.099 | -0.0013 | -0.00084 | 0.0046 | -0.0012 | -0.00034 | -0.00084 | -0.00064 | 0.00025 | 0.0025 | -0.0018 | 0.00015 | -0.00021 | -0.0015 | 0.0062 | 0.0009 | 0.0034 | 0.009 | 0.0062 | 0.0011 | 0.0033 | 0.0012 | -0.00045 | -0.00075 | 0.0011 | -0.00043 | -0.0077 | -0.0016 | -8.3e-05 | -0.0015 | -7.8e-05 | 0.0035 | nan | -0.00026 | 0.00053 | -0.0007 | -0.0011 | -0.0042 | 0.00056 | 1 | -5.4e-05 | -6.8e-05 | -4.2e-05 | -6.7e-05 | -0.00053 | -0.00023 | -0.00011 | -0.00051 | -0.00035 | -0.00034 | -0.00045 | -0.00026 | -0.00034 | -0.00032 | -0.00035 | -0.00072 | -0.00039 | -0.00043 | -0.00064 | -0.00045 | -0.00064 | -7.7e-05 | -0.00011 | -0.0027 | -0.00049 | -0.0001 | -0.0014 | -0.0007 | -0.00011 | -0.0037 | -0.00037 | -0.00049 | -0.00047 | -0.00022 | -0.00042 | -0.0015 | -0.0076 | -0.0035 | -0.00088 | -0.00072 | -0.001 | -0.0002 | -0.0017 | -3.4e-06 | 0.0033 | -0.0012 | -0.0014 | -4.8e-06 | -0.00016 | 0.00089 |
| Interaction_MYYear_14Month_7 | 0.0016 | 0.0043 | 0.036 | -0.00091 | -0.00084 | 0.0077 | -0.0042 | -0.0023 | 0.00048 | 3.7e-06 | -0.00098 | -0.00074 | 0.036 | -0.0012 | 0.036 | 0.016 | 0.0045 | 0.021 | 0.044 | -0.0035 | -0.025 | -0.00013 | 0.064 | -0.00069 | -0.00048 | 0.00079 | -0.0016 | -0.00025 | -0.0011 | -0.00087 | 0.00063 | 0.0014 | -0.0027 | -0.00097 | -0.0016 | -0.0024 | 0.0041 | -0.0012 | 0.0017 | 0.007 | 0.0042 | 0.0015 | 0.0044 | 0.00074 | 0.00032 | 0.0002 | 0.0008 | -0.0013 | -0.0057 | -0.0013 | -6.1e-05 | -0.0024 | 0.00071 | 0.0049 | nan | -0.00053 | 0.00059 | -0.00086 | -0.0011 | -0.0031 | 0.0011 | -5.4e-05 | 1 | -5.1e-05 | -3.1e-05 | -5e-05 | -0.0004 | -0.00017 | -8.5e-05 | -0.00038 | -0.00026 | -0.00025 | -0.00034 | -0.0002 | -0.00025 | -0.00024 | -0.00026 | -0.00053 | -0.00029 | -0.00032 | -0.00048 | -0.00034 | -0.00048 | -5.7e-05 | -7.9e-05 | -0.002 | -0.00036 | -7.6e-05 | -0.001 | -0.00052 | -8.2e-05 | -0.0028 | -0.00027 | -0.00036 | -0.00035 | -0.00016 | -0.00031 | -0.0011 | -0.0057 | -0.0026 | -0.00065 | -0.00054 | -0.00076 | -0.00015 | -0.0014 | -2.5e-06 | 0.0019 | -0.00086 | -0.00074 | -3.4e-06 | -0.00046 | 0.00046 |
| Interaction_MYYear_14Month_10 | -0.00083 | 0.0043 | 0.051 | -0.001 | -0.0011 | 0.0058 | -0.0035 | -0.0032 | -8.3e-05 | 0.00026 | -0.0015 | -0.0017 | 0.047 | -0.00083 | 0.047 | 0.02 | 0.0044 | 0.026 | 0.065 | -0.004 | -0.032 | -0.00017 | 0.061 | -0.00091 | -0.00061 | -0.00012 | -0.00093 | -0.00032 | -0.0011 | 0.00018 | 0.00011 | 0.0004 | -0.0022 | -0.00084 | -0.00032 | -0.0023 | 0.006 | -0.00082 | 0.0021 | 0.0078 | 0.006 | 0.0024 | 0.0047 | 0.0014 | -4.3e-05 | -0.0016 | 0.00091 | -0.00078 | -0.0073 | -0.001 | -7.8e-05 | -0.0023 | 0.00011 | 0.0051 | nan | -0.00068 | 0.00098 | -0.00025 | -0.00095 | -0.0041 | -0.00095 | -6.8e-05 | -5.1e-05 | 1 | -4e-05 | -6.3e-05 | -0.0005 | -0.00022 | -0.00011 | -0.00048 | -0.00033 | -0.00032 | -0.00043 | -0.00025 | -0.00032 | -0.0003 | -0.00033 | -0.00068 | -0.00037 | -0.00041 | -0.00061 | -0.00043 | -0.00061 | -7.3e-05 | -0.0001 | -0.0026 | -0.00046 | -9.7e-05 | -0.0013 | -0.00066 | -0.0001 | -0.0035 | -0.00035 | -0.00046 | -0.00044 | -0.0002 | -0.00039 | -0.0014 | -0.0072 | -0.0033 | -0.00083 | -0.00069 | -0.00097 | -0.00019 | -0.00074 | -3.2e-06 | 0.0033 | -0.0011 | -0.0015 | -4.5e-06 | -0.0021 | -0.0001 |
| Interaction_MYYear_15Month_1 | 0.0098 | 0.0031 | 0.034 | -0.00046 | -0.00066 | 0.0043 | -0.01 | -0.0023 | -0.00034 | 0.00037 | -0.00092 | -0.00081 | 0.026 | -0.00035 | 0.026 | 0.014 | 0.0021 | 0.017 | 0.043 | -0.00025 | -0.011 | -0.0001 | 0.044 | -0.00042 | -0.0006 | 0.0041 | -0.00014 | -0.0002 | -0.0005 | 5.6e-05 | 9e-05 | 0.0013 | -0.0003 | -0.0006 | 0.00021 | -0.00083 | 0.0038 | -0.00035 | 0.0017 | 0.0055 | 0.0039 | 0.0011 | 0.0024 | 0.00088 | 0.00059 | -8.2e-05 | 0.00072 | -0.00087 | -0.0045 | -0.00057 | -4.8e-05 | -0.00028 | 8.8e-05 | 0.0019 | nan | -0.00042 | 0.00054 | 0.00035 | -0.00067 | -0.0025 | 0.00028 | -4.2e-05 | -3.1e-05 | -4e-05 | 1 | -3.9e-05 | -0.00031 | -0.00013 | -6.7e-05 | -0.0003 | -0.0002 | -0.0002 | -0.00027 | -0.00015 | -0.0002 | -0.00019 | -0.00021 | -0.00042 | -0.00023 | -0.00025 | -0.00038 | -0.00027 | -0.00038 | -4.5e-05 | -6.2e-05 | -0.0016 | -0.00029 | -6e-05 | -0.00081 | -0.00041 | -6.5e-05 | -0.0022 | -0.00021 | -0.00029 | -0.00027 | -0.00013 | -0.00024 | -0.00086 | -0.0045 | -0.002 | -0.00051 | -0.00042 | -0.0006 | -0.00012 | -0.00033 | -2e-06 | 0.001 | -0.00068 | -0.00067 | -2.8e-06 | -0.00055 | 0.00027 |
| Interaction_MYYear_15Month_3 | 0.046 | 0.0022 | 0.033 | -0.00039 | -0.001 | 0.0042 | -0.044 | -0.0028 | 0.00082 | -0.00055 | -0.0017 | -0.00093 | 0.032 | 0.0057 | 0.068 | 0.044 | 0.00042 | 0.029 | 0.067 | 0.01 | -0.004 | -0.00016 | 0.075 | -0.0012 | 0.00017 | 0.0086 | -0.0018 | -0.00032 | 0.00028 | 0.0022 | -1.2e-05 | 0.0035 | 0.00022 | -0.00098 | -0.0003 | -0.00053 | 0.0059 | 0.0051 | 0.0024 | 0.011 | 0.0055 | -0.0014 | 0.0003 | 0.0016 | -0.00026 | -0.00022 | 0.00016 | 0.00083 | -0.0071 | -0.0011 | -7.7e-05 | -0.00042 | 0.0012 | -0.003 | nan | -0.00017 | -0.0013 | -0.0014 | -0.0017 | -0.0044 | 0.0012 | -6.7e-05 | -5e-05 | -6.3e-05 | -3.9e-05 | 1 | -0.00049 | -0.00021 | -0.00011 | -0.00047 | -0.00032 | -0.00032 | -0.00042 | -0.00024 | -0.00031 | -0.0003 | -0.00033 | -0.00066 | -0.00036 | -0.0004 | -0.00059 | -0.00042 | -0.0006 | -7.1e-05 | -9.9e-05 | -0.0025 | -0.00045 | -9.5e-05 | -0.0013 | -0.00065 | -0.0001 | -0.0035 | -0.00034 | -0.00045 | -0.00043 | -0.0002 | -0.00039 | -0.0014 | -0.0071 | -0.0032 | -0.00081 | -0.00067 | -0.00095 | -0.00018 | -0.0014 | -3.2e-06 | 0.0015 | -0.0011 | -0.002 | -4.4e-06 | -0.0028 | -0.0021 |
| Interaction_MYYear_15Month_7 | -0.0041 | 0.015 | 0.13 | -0.0074 | -0.0083 | -0.0022 | 0.0044 | 0.03 | -0.01 | -0.006 | -0.023 | -0.015 | -0.0096 | 0.0023 | -0.0087 | 0.12 | -0.055 | 0.13 | 0.04 | 0.095 | -0.036 | -0.0013 | 0.072 | 0.0028 | -0.0034 | -0.0021 | -0.014 | -0.0025 | -0.00044 | -0.0032 | -0.00043 | 0.0044 | 0.002 | 0.0082 | -0.00078 | -0.0069 | 0.023 | 0.0022 | 0.11 | 0.14 | 0.026 | 0.051 | 0.023 | -0.023 | -0.031 | -0.029 | -0.076 | 0.077 | 0.06 | 0.12 | -8.2e-05 | 0.0027 | -0.0038 | -0.044 | nan | 0.005 | -0.016 | -0.011 | -0.014 | 0.00077 | 0.0047 | -0.00053 | -0.0004 | -0.0005 | -0.00031 | -0.00049 | 1 | -0.0017 | -0.00085 | -0.0037 | -0.0026 | -0.0025 | -0.0033 | -0.0019 | -0.0025 | -0.0024 | -0.0026 | -0.0053 | -0.0029 | -0.0032 | -0.0047 | -0.0033 | -0.0047 | -0.00057 | -0.00079 | -0.02 | -0.0036 | -0.00076 | -0.01 | -0.0052 | -0.00081 | -0.028 | -0.0027 | -0.0036 | -0.0035 | -0.0016 | -0.0031 | -0.011 | -0.056 | -0.026 | -0.0065 | -0.0053 | -0.0075 | -0.0015 | 0.012 | -2.5e-05 | 0.021 | -0.0074 | -0.015 | -3.4e-05 | -0.022 | -0.014 |
| Interaction_MYYear_15Month_8 | -0.0027 | 0.0076 | 0.057 | -0.0033 | -0.0036 | -0.00086 | 0.0028 | 0.014 | -0.0038 | -0.003 | -0.0055 | -0.0029 | -0.0044 | 0.005 | -0.0039 | 0.053 | -0.013 | 0.057 | 0.15 | 0.012 | -0.015 | -0.00056 | 0.064 | 0.0022 | -0.00043 | -0.0013 | 0.00016 | -0.0011 | 0.00064 | -0.002 | 0.00017 | 0.0017 | 0.00076 | 0.003 | -0.00074 | -0.0019 | 0.12 | 0.0049 | 0.053 | 0.059 | 0.12 | 0.0045 | -0.003 | 0.00026 | -0.0087 | -0.01 | -0.032 | 0.016 | 0.031 | 0.05 | -0.00026 | 0.00036 | -0.0014 | -0.0097 | nan | -0.00055 | -0.0004 | -0.002 | 0.0018 | 0.00046 | 0.0015 | -0.00023 | -0.00017 | -0.00022 | -0.00013 | -0.00021 | -0.0017 | 1 | -0.00036 | -0.0016 | -0.0011 | -0.0011 | -0.0014 | -0.00083 | -0.0011 | -0.001 | -0.0011 | -0.0023 | -0.0012 | -0.0014 | -0.002 | -0.0014 | -0.002 | -0.00024 | -0.00034 | -0.0086 | -0.0015 | -0.00032 | -0.0044 | -0.0022 | -0.00035 | -0.012 | -0.0012 | -0.0015 | -0.0015 | -0.00068 | -0.0013 | -0.0047 | -0.024 | -0.011 | -0.0028 | -0.0023 | -0.0032 | -0.00063 | 0.0064 | -1.1e-05 | 0.0042 | -0.002 | -0.0085 | -1.5e-05 | -0.0093 | -0.0021 |
| Interaction_MYYear_15Month_9 | 0.002 | 0.0047 | 0.03 | -0.0018 | -0.0018 | 0.00048 | -0.0019 | 0.0034 | -0.00095 | -0.0015 | 0.00068 | 6.5e-05 | -0.00036 | 0.0036 | -0.0002 | 0.027 | 0.0034 | 0.03 | 0.1 | 0.00026 | -0.0083 | -0.00028 | 0.037 | -0.00023 | 0.00034 | 0.00016 | 0.0041 | -0.00054 | 0.0026 | -0.00047 | -0.0012 | -0.0013 | 0.0013 | 0.0014 | -0.0015 | -0.00039 | 0.071 | 0.0036 | 0.026 | 0.029 | 0.065 | 0.003 | 0.0045 | 0.0085 | -0.0035 | -0.0044 | -0.014 | 0.00089 | 0.012 | 0.022 | -0.00013 | 0.0016 | -0.00033 | 0.0014 | nan | -0.00085 | 0.0024 | -0.0018 | 0.0042 | -0.00022 | 0.00035 | -0.00011 | -8.5e-05 | -0.00011 | -6.7e-05 | -0.00011 | -0.00085 | -0.00036 | 1 | -0.00081 | -0.00055 | -0.00054 | -0.00072 | -0.00042 | -0.00054 | -0.00051 | -0.00056 | -0.0011 | -0.00062 | -0.00068 | -0.001 | -0.00072 | -0.001 | -0.00012 | -0.00017 | -0.0043 | -0.00077 | -0.00016 | -0.0022 | -0.0011 | -0.00018 | -0.0059 | -0.00058 | -0.00077 | -0.00074 | -0.00034 | -0.00066 | -0.0023 | -0.012 | -0.0055 | -0.0014 | -0.0012 | -0.0016 | -0.00032 | 0.0012 | -5.4e-06 | -0.00011 | -0.001 | -0.0064 | -7.6e-06 | -0.0049 | 0.0022 |
| Interaction_MYYear_15Month_10 | -0.0019 | 0.0076 | 0.069 | -0.0064 | -0.0079 | -0.0039 | 0.002 | -0.0031 | -0.0032 | -0.0016 | -0.013 | -0.0033 | -0.009 | 0.0095 | -0.0082 | 0.087 | -0.026 | 0.094 | 0.067 | 0.016 | -0.013 | -0.0012 | 0.13 | 0.00043 | -0.0033 | -0.0011 | 0.0024 | -0.0024 | 0.0028 | 5.9e-05 | 0.0024 | 0.0087 | 0.023 | -0.012 | -0.0019 | 0.0021 | 0.045 | 0.0095 | 0.03 | 0.096 | -0.034 | -0.0057 | -0.019 | 0.0017 | -0.048 | -0.0085 | -0.064 | 0.069 | -0.0097 | -0.0094 | -0.00058 | -0.00088 | 0.0013 | -0.029 | nan | 0.0064 | -0.0069 | -0.0086 | -0.0028 | -0.006 | -0.0013 | -0.00051 | -0.00038 | -0.00048 | -0.0003 | -0.00047 | -0.0037 | -0.0016 | -0.00081 | 1 | -0.0024 | -0.0024 | -0.0032 | -0.0019 | -0.0024 | -0.0023 | -0.0025 | -0.005 | -0.0027 | -0.003 | -0.0045 | -0.0032 | -0.0045 | -0.00054 | -0.00075 | -0.019 | -0.0034 | -0.00072 | -0.0097 | -0.0049 | -0.00078 | -0.026 | -0.0026 | -0.0034 | -0.0033 | -0.0015 | -0.0029 | -0.01 | -0.054 | -0.024 | -0.0062 | -0.0051 | -0.0072 | -0.0014 | -0.00048 | -2.4e-05 | 0.02 | -0.0061 | -0.0099 | -3.3e-05 | -0.016 | -0.0092 |
| Interaction_MYYear_15Month_11 | -0.0041 | 0.017 | 0.048 | -0.0048 | -0.0054 | -0.0013 | 0.0042 | 0.0095 | -0.0047 | -0.0037 | -0.0087 | -0.0039 | -0.0061 | 0.011 | -0.0055 | 0.06 | -0.0047 | 0.065 | 0.12 | 0.0011 | -0.023 | -0.00085 | 0.077 | 0.00016 | 0.00049 | -0.00044 | 0.0052 | -0.0016 | 0.0024 | -0.0033 | -0.0024 | -0.0017 | 0.007 | -0.007 | -0.0046 | 0.00097 | 0.095 | 0.011 | 0.02 | 0.068 | 0.099 | 0.00026 | -0.0044 | 0.013 | -0.025 | -0.014 | -0.044 | 0.028 | 0.0071 | -0.0065 | 3.9e-06 | 0.0054 | -0.00058 | -0.0063 | nan | -0.0022 | 0.0027 | -0.0019 | 0.0078 | -0.0039 | 0.0019 | -0.00035 | -0.00026 | -0.00033 | -0.0002 | -0.00032 | -0.0026 | -0.0011 | -0.00055 | -0.0024 | 1 | -0.0017 | -0.0022 | -0.0013 | -0.0016 | -0.0015 | -0.0017 | -0.0035 | -0.0019 | -0.0021 | -0.0031 | -0.0022 | -0.0031 | -0.00037 | -0.00051 | -0.013 | -0.0024 | -0.00049 | -0.0067 | -0.0034 | -0.00053 | -0.018 | -0.0018 | -0.0024 | -0.0023 | -0.001 | -0.002 | -0.0071 | -0.037 | -0.017 | -0.0042 | -0.0035 | -0.0049 | -0.00096 | 0.0048 | -1.6e-05 | 0.0037 | -0.0036 | -0.017 | -2.3e-05 | -0.015 | -0.0002 |
| Interaction_MYYear_16Month_1 | 0.0045 | 0.011 | 0.053 | -0.004 | -0.0054 | -0.00058 | -0.0041 | -0.0044 | -4.4e-05 | 0.0001 | -0.0092 | -0.0042 | 0.00063 | 0.01 | 0.0012 | 0.061 | -0.01 | 0.067 | 0.078 | 0.0076 | -0.011 | -0.00084 | 0.057 | -0.0015 | -0.0015 | 0.0023 | 0.0042 | -0.0016 | 0.0071 | 0.002 | 0.0021 | 0.0084 | 0.015 | -0.0072 | -0.00058 | 0.0026 | 0.047 | 0.01 | 0.022 | 0.065 | 0.047 | -0.003 | -0.008 | 0.0042 | -0.0032 | -0.0041 | -0.034 | 0.027 | -0.0065 | -0.0049 | -0.00039 | 0.0048 | 0.0041 | -0.016 | nan | 0.0043 | -0.00085 | -0.0038 | 0.0018 | -0.0076 | 0.0012 | -0.00034 | -0.00025 | -0.00032 | -0.0002 | -0.00032 | -0.0025 | -0.0011 | -0.00054 | -0.0024 | -0.0017 | 1 | -0.0022 | -0.0013 | -0.0016 | -0.0015 | -0.0017 | -0.0034 | -0.0018 | -0.002 | -0.003 | -0.0021 | -0.0031 | -0.00036 | -0.00051 | -0.013 | -0.0023 | -0.00049 | -0.0066 | -0.0033 | -0.00052 | -0.018 | -0.0017 | -0.0023 | -0.0022 | -0.001 | -0.002 | -0.007 | -0.036 | -0.017 | -0.0042 | -0.0034 | -0.0049 | -0.00095 | -0.0031 | -1.6e-05 | 0.013 | -0.0038 | -0.0073 | -2.3e-05 | -0.0094 | -0.0043 |
| Interaction_MYYear_16Month_2 | 0.0064 | 0.02 | 0.065 | -0.0052 | -0.0071 | -0.00028 | -0.0059 | 0.0021 | 0.00041 | -0.002 | -0.0042 | -0.0034 | -0.0058 | 0.0052 | -0.005 | 0.08 | 0.0042 | 0.087 | 0.076 | -0.0041 | -0.023 | -0.0011 | 0.071 | -0.0016 | -0.00046 | -5.3e-05 | 0.0084 | -0.0021 | 0.0025 | -0.0022 | 0.00073 | -0.00086 | 0.013 | -0.0094 | -0.0023 | 0.0015 | -0.0046 | 0.0052 | 0.027 | 0.087 | -0.005 | 0.002 | 0.0031 | 0.024 | -0.006 | -0.0067 | -0.052 | 0.02 | 0.013 | -0.0078 | -0.00052 | -0.0013 | -0.00073 | 0.00059 | nan | 0.001 | -5e-06 | -0.0065 | 0.0091 | -0.0085 | 0.00016 | -0.00045 | -0.00034 | -0.00043 | -0.00027 | -0.00042 | -0.0033 | -0.0014 | -0.00072 | -0.0032 | -0.0022 | -0.0022 | 1 | -0.0017 | -0.0021 | -0.002 | -0.0022 | -0.0045 | -0.0024 | -0.0027 | -0.004 | -0.0028 | -0.004 | -0.00048 | -0.00067 | -0.017 | -0.0031 | -0.00064 | -0.0087 | -0.0044 | -0.00069 | -0.024 | -0.0023 | -0.0031 | -0.0029 | -0.0014 | -0.0026 | -0.0093 | -0.048 | -0.022 | -0.0055 | -0.0046 | -0.0064 | -0.0013 | -0.0019 | -2.1e-05 | 0.0022 | -0.0028 | -0.012 | -3e-05 | -0.014 | -0.0016 |
| Interaction_MYYear_16Month_3 | 0.016 | 0.027 | 0.056 | -0.0032 | -0.0041 | 0.0031 | -0.015 | 0.0027 | 4.5e-05 | -0.00088 | 0.0018 | 0.0013 | 0.022 | 0.0068 | 0.023 | 0.053 | 0.0092 | 0.059 | 0.1 | -0.006 | -0.026 | -0.00064 | 0.056 | -0.003 | -0.0011 | 0.0017 | 0.0068 | -0.00099 | 0.002 | -0.00084 | 0.0017 | 0.00093 | 0.0021 | -0.0041 | 0.0038 | 0.0022 | 0.033 | 0.0069 | 0.013 | 0.05 | 0.034 | -0.00077 | 0.0042 | 0.016 | -0.0046 | -0.0053 | -0.022 | -0.0031 | 0.0052 | -0.0058 | 0.00023 | 0.0026 | -0.0012 | 0.0033 | nan | -0.0021 | 0.0066 | -0.0035 | 0.011 | -0.007 | 0.0038 | -0.00026 | -0.0002 | -0.00025 | -0.00015 | -0.00024 | -0.0019 | -0.00083 | -0.00042 | -0.0019 | -0.0013 | -0.0013 | -0.0017 | 1 | -0.0012 | -0.0012 | -0.0013 | -0.0026 | -0.0014 | -0.0016 | -0.0023 | -0.0017 | -0.0023 | -0.00028 | -0.00039 | -0.0099 | -0.0018 | -0.00037 | -0.0051 | -0.0026 | -0.0004 | -0.014 | -0.0013 | -0.0018 | -0.0017 | -0.00079 | -0.0015 | -0.0054 | -0.028 | -0.013 | -0.0032 | -0.0026 | -0.0037 | -0.00073 | -0.0028 | -1.2e-05 | -0.0024 | -0.0021 | -0.0096 | -1.8e-05 | -0.009 | 0.00083 |
| Interaction_MYYear_16Month_4 | 0.11 | 0.022 | 0.085 | -0.0039 | -0.0053 | 0.0061 | -0.1 | -0.0032 | 0.0046 | -0.0014 | -0.0028 | -9.5e-06 | 0.029 | 0.016 | 0.029 | 0.084 | 0.0055 | 0.091 | 0.04 | 0.018 | -0.014 | -0.00083 | 0.095 | -0.0061 | 0.0017 | 0.022 | 0.0031 | -0.0016 | 0.0033 | 0.0052 | 0.0049 | 0.0078 | 0.0089 | -0.0036 | -0.0023 | 0.0015 | 0.026 | 0.016 | 0.021 | 0.066 | 0.028 | -0.0081 | -0.0018 | 0.013 | -0.0018 | -0.0012 | -0.019 | -0.0039 | -0.009 | -0.0044 | -0.00039 | 0.0057 | 0.0021 | -0.01 | nan | -0.0021 | -0.0011 | -0.004 | 0.0046 | -0.013 | 0.0077 | -0.00034 | -0.00025 | -0.00032 | -0.0002 | -0.00031 | -0.0025 | -0.0011 | -0.00054 | -0.0024 | -0.0016 | -0.0016 | -0.0021 | -0.0012 | 1 | -0.0015 | -0.0016 | -0.0034 | -0.0018 | -0.002 | -0.003 | -0.0021 | -0.003 | -0.00036 | -0.0005 | -0.013 | -0.0023 | -0.00048 | -0.0065 | -0.0033 | -0.00052 | -0.018 | -0.0017 | -0.0023 | -0.0022 | -0.001 | -0.002 | -0.0069 | -0.036 | -0.016 | -0.0041 | -0.0034 | -0.0048 | -0.00093 | -0.0062 | -1.6e-05 | 0.0014 | -0.0025 | -0.0089 | -2.3e-05 | -0.012 | -0.0037 |
| Interaction_MYYear_16Month_5 | -0.0012 | 0.028 | 0.044 | -0.0035 | -0.005 | 0.00069 | 0.001 | 0.0031 | 0.00028 | -0.00097 | -0.0055 | -0.0026 | -0.0058 | 0.0075 | -0.0054 | 0.055 | 0.0004 | 0.059 | 0.031 | -0.003 | -0.019 | -0.00078 | 0.039 | -0.0022 | 0.00018 | -0.00093 | -0.00043 | -0.0015 | 0.0015 | -0.00039 | 0.0016 | 0.0027 | 0.0065 | -0.0041 | 0.00015 | -0.0054 | 0.02 | 0.0075 | 0.017 | 0.062 | 0.022 | -0.0059 | -0.0043 | 0.0098 | -0.0087 | -0.0078 | -0.028 | 0.001 | 0.0044 | -0.0068 | 6.8e-05 | 0.0033 | -0.00093 | -0.0019 | nan | -0.0015 | 0.0017 | -0.0026 | 0.0003 | -0.0084 | 0.0037 | -0.00032 | -0.00024 | -0.0003 | -0.00019 | -0.0003 | -0.0024 | -0.001 | -0.00051 | -0.0023 | -0.0015 | -0.0015 | -0.002 | -0.0012 | -0.0015 | 1 | -0.0016 | -0.0032 | -0.0017 | -0.0019 | -0.0029 | -0.002 | -0.0029 | -0.00034 | -0.00047 | -0.012 | -0.0022 | -0.00046 | -0.0062 | -0.0031 | -0.00049 | -0.017 | -0.0016 | -0.0022 | -0.0021 | -0.00096 | -0.0019 | -0.0066 | -0.034 | -0.015 | -0.0039 | -0.0032 | -0.0045 | -0.00089 | -0.0039 | -1.5e-05 | 0.00084 | -0.0024 | -0.0062 | -2.1e-05 | -0.011 | -0.0011 |
| Interaction_MYYear_16Month_6 | -0.0038 | 0.022 | 0.11 | -0.0039 | -0.0055 | -0.00012 | 0.0032 | 0.0041 | 0.00019 | -0.0016 | -0.011 | -0.0037 | 0.00016 | 0.0064 | 0.00073 | 0.062 | -0.017 | 0.068 | 0.066 | 0.01 | -0.014 | -0.00086 | 0.095 | -0.00063 | -0.00045 | -0.0021 | 0.0032 | -0.0017 | 0.0014 | 0.00091 | 0.0062 | 0.0088 | 0.014 | -0.00085 | 0.0002 | 0.0039 | 0.061 | 0.0065 | 0.025 | 0.069 | 0.062 | -0.0047 | -0.014 | -0.0034 | -0.016 | -0.015 | -0.031 | 0.014 | 0.00087 | -0.0046 | 0.0004 | 0.0011 | 0.00047 | -0.016 | nan | 0.0011 | -0.0016 | -0.0041 | -0.00087 | -0.011 | 0.0017 | -0.00035 | -0.00026 | -0.00033 | -0.00021 | -0.00033 | -0.0026 | -0.0011 | -0.00056 | -0.0025 | -0.0017 | -0.0017 | -0.0022 | -0.0013 | -0.0016 | -0.0016 | 1 | -0.0035 | -0.0019 | -0.0021 | -0.0031 | -0.0022 | -0.0031 | -0.00037 | -0.00052 | -0.013 | -0.0024 | -0.0005 | -0.0067 | -0.0034 | -0.00054 | -0.018 | -0.0018 | -0.0024 | -0.0023 | -0.001 | -0.002 | -0.0072 | -0.037 | -0.017 | -0.0043 | -0.0035 | -0.005 | -0.00097 | -0.0042 | -1.7e-05 | 0.011 | -0.0036 | -0.0058 | -2.3e-05 | -0.013 | -0.006 |
| Interaction_MYYear_16Month_7 | -0.0086 | 0.013 | 0.056 | 0.0014 | 0.00028 | -0.0032 | 0.0088 | -0.0094 | -0.0028 | -0.0012 | -0.017 | -0.0098 | -0.014 | -0.0039 | -0.013 | 0.077 | -0.0081 | 0.083 | -0.0069 | 0.0043 | -0.018 | -0.0017 | -0.037 | 0.0014 | -0.0032 | -0.0031 | -0.00081 | -0.002 | 0.0034 | -0.0041 | 0.0018 | -0.00025 | -0.0036 | -0.014 | 5.3e-06 | -0.0025 | -0.0034 | -0.0039 | -0.036 | 0.086 | -0.0038 | 0.0035 | -0.0052 | 0.019 | -0.011 | -0.014 | -0.075 | 0.06 | 0.0032 | -0.0093 | -3.6e-05 | -0.003 | -0.0033 | -0.017 | nan | 0.0047 | -0.0057 | -0.0081 | 0.00015 | -0.019 | -0.0047 | -0.00072 | -0.00053 | -0.00068 | -0.00042 | -0.00066 | -0.0053 | -0.0023 | -0.0011 | -0.005 | -0.0035 | -0.0034 | -0.0045 | -0.0026 | -0.0034 | -0.0032 | -0.0035 | 1 | -0.0039 | -0.0043 | -0.0064 | -0.0045 | -0.0064 | -0.00076 | -0.0011 | -0.027 | -0.0048 | -0.001 | -0.014 | -0.007 | -0.0011 | -0.037 | -0.0036 | -0.0048 | -0.0047 | -0.0021 | -0.0041 | -0.015 | -0.076 | -0.035 | -0.0087 | -0.0072 | -0.01 | -0.002 | -0.001 | -3.4e-05 | 0.02 | -0.0061 | -0.0082 | -4.7e-05 | -0.018 | -0.0073 |
| Interaction_MYYear_16Month_8 | -0.004 | 0.036 | 0.12 | -0.0033 | -0.0039 | 0.0042 | 0.0035 | -0.00078 | 0.0023 | -0.0018 | -6.1e-05 | 0.00085 | 0.01 | 0.011 | 0.011 | 0.064 | 0.016 | 0.071 | 0.13 | -0.015 | -0.029 | -0.00095 | 0.039 | -0.0049 | 0.0019 | -0.0025 | 0.0057 | -0.0018 | 0.0011 | -0.00084 | 0.0052 | 0.0035 | 0.0073 | 0.0015 | 0.00044 | -0.0032 | 0.05 | 0.011 | 0.014 | 0.068 | 0.053 | -0.0073 | 0.0056 | 0.022 | -0.0027 | -0.0032 | -0.015 | -0.0065 | -0.0027 | -0.0038 | 0.00028 | 0.0067 | -0.00018 | 0.0093 | nan | -0.0025 | 0.0075 | -0.0031 | 0.0081 | -0.012 | 0.0066 | -0.00039 | -0.00029 | -0.00037 | -0.00023 | -0.00036 | -0.0029 | -0.0012 | -0.00062 | -0.0027 | -0.0019 | -0.0018 | -0.0024 | -0.0014 | -0.0018 | -0.0017 | -0.0019 | -0.0039 | 1 | -0.0023 | -0.0035 | -0.0024 | -0.0035 | -0.00041 | -0.00057 | -0.015 | -0.0026 | -0.00055 | -0.0074 | -0.0038 | -0.00059 | -0.02 | -0.002 | -0.0026 | -0.0025 | -0.0012 | -0.0022 | -0.0079 | -0.041 | -0.019 | -0.0047 | -0.0039 | -0.0055 | -0.0011 | -0.0039 | -1.8e-05 | -0.005 | -0.0013 | -0.011 | -2.6e-05 | -0.014 | 0.0019 |
| Interaction_MYYear_16Month_9 | 0.014 | 0.037 | 0.11 | -0.0012 | -0.0017 | 0.004 | -0.013 | 0.0037 | -0.00016 | -0.0018 | 0.0022 | 0.0057 | 0.014 | 0.0054 | 0.015 | 0.066 | 0.014 | 0.074 | 0.16 | -0.01 | -0.03 | -0.001 | 0.043 | -0.0053 | 0.00081 | 0.0021 | -0.0026 | -0.0017 | -0.0023 | -0.0038 | 0.0057 | -0.0012 | -0.00069 | -0.0026 | -0.0024 | -0.009 | 0.1 | 0.0055 | 0.00061 | 0.067 | 0.1 | -0.0025 | 0.006 | 0.026 | -0.0056 | -0.0044 | -0.019 | -0.0051 | 0.0068 | -0.0042 | -0.00016 | 0.01 | -0.0073 | 0.013 | nan | -0.0025 | 0.0048 | -0.0032 | 0.0029 | -0.012 | 0.0032 | -0.00043 | -0.00032 | -0.00041 | -0.00025 | -0.0004 | -0.0032 | -0.0014 | -0.00068 | -0.003 | -0.0021 | -0.002 | -0.0027 | -0.0016 | -0.002 | -0.0019 | -0.0021 | -0.0043 | -0.0023 | 1 | -0.0038 | -0.0027 | -0.0038 | -0.00046 | -0.00063 | -0.016 | -0.0029 | -0.00061 | -0.0082 | -0.0042 | -0.00066 | -0.022 | -0.0022 | -0.0029 | -0.0028 | -0.0013 | -0.0025 | -0.0088 | -0.045 | -0.021 | -0.0052 | -0.0043 | -0.0061 | -0.0012 | -0.0044 | -2e-05 | -0.012 | -0.00032 | -0.009 | -2.9e-05 | -0.016 | -0.0013 |
| Interaction_MYYear_16Month_10 | 0.0069 | 0.052 | 0.16 | -0.0028 | -0.0037 | 0.0052 | -0.0063 | -0.005 | 0.0042 | -0.0032 | -0.0029 | -8.2e-05 | -0.0091 | 0.013 | -0.0081 | 0.097 | 0.026 | 0.1 | 0.19 | -0.023 | -0.036 | -0.0016 | 0.16 | -0.0088 | 0.0025 | -0.00013 | 0.011 | -0.003 | 0.0019 | -0.0035 | 0.0004 | -0.003 | 0.005 | 0.00093 | 0.002 | -0.00047 | 0.16 | 0.013 | 0.088 | 0.11 | 0.17 | -0.0073 | 0.01 | 0.04 | -0.0042 | -0.0059 | -0.036 | -0.0086 | 0.0066 | -0.012 | -7.6e-05 | 0.00013 | -0.00068 | 0.015 | nan | -0.0036 | 0.01 | -0.0036 | 0.022 | -0.019 | 0.013 | -0.00064 | -0.00048 | -0.00061 | -0.00038 | -0.00059 | -0.0047 | -0.002 | -0.001 | -0.0045 | -0.0031 | -0.003 | -0.004 | -0.0023 | -0.003 | -0.0029 | -0.0031 | -0.0064 | -0.0035 | -0.0038 | 1 | -0.004 | -0.0057 | -0.00068 | -0.00095 | -0.024 | -0.0043 | -0.00091 | -0.012 | -0.0062 | -0.00098 | -0.033 | -0.0032 | -0.0043 | -0.0042 | -0.0019 | -0.0037 | -0.013 | -0.068 | -0.031 | -0.0078 | -0.0064 | -0.0091 | -0.0018 | -0.0082 | -3e-05 | -0.01 | 0.00058 | -0.023 | -4.3e-05 | -0.021 | 0.0076 |
| Interaction_MYYear_16Month_11 | -0.0052 | 0.032 | 0.063 | 2.7e-05 | 0.00095 | 0.0029 | 0.0052 | -0.0088 | 0.0024 | -0.0012 | -0.0066 | -0.0029 | -0.0084 | 0.0042 | -0.0087 | 0.053 | 0.0032 | 0.057 | 0.087 | -0.004 | -0.022 | -0.0011 | 0.0021 | -0.0019 | 0.00022 | -0.0018 | 0.0048 | 0.00087 | 0.0024 | -0.0002 | 0.0029 | 0.0026 | -0.0016 | -0.0032 | 0.0025 | 0.00032 | 0.066 | 0.0043 | -0.015 | 0.06 | 0.065 | -0.0021 | 0.00075 | 0.01 | -0.002 | -0.0041 | -0.026 | 0.0076 | -0.00025 | -0.00073 | -0.00021 | 0.0025 | -7.2e-05 | -0.0021 | nan | -0.00057 | 0.0028 | -0.0023 | 0.0073 | -0.013 | 0.0024 | -0.00045 | -0.00034 | -0.00043 | -0.00027 | -0.00042 | -0.0033 | -0.0014 | -0.00072 | -0.0032 | -0.0022 | -0.0021 | -0.0028 | -0.0017 | -0.0021 | -0.002 | -0.0022 | -0.0045 | -0.0024 | -0.0027 | -0.004 | 1 | -0.004 | -0.00048 | -0.00067 | -0.017 | -0.0031 | -0.00064 | -0.0087 | -0.0044 | -0.00069 | -0.023 | -0.0023 | -0.0031 | -0.0029 | -0.0013 | -0.0026 | -0.0092 | -0.048 | -0.022 | -0.0055 | -0.0045 | -0.0064 | -0.0012 | -0.0025 | -2.1e-05 | 0.00099 | -0.0011 | -0.0085 | -3e-05 | -0.011 | -0.0009 |
| Interaction_MYYear_16Month_12 | -0.0073 | 0.048 | 0.06 | 0.0024 | 0.0043 | 0.0072 | 0.0068 | -0.0099 | 0.006 | -0.0023 | -0.0045 | -0.001 | -0.012 | 0.0049 | -0.013 | 0.069 | 0.018 | 0.074 | 0.14 | -0.018 | -0.033 | -0.0016 | -0.034 | -0.0056 | 0.0031 | -0.0033 | -0.0024 | -0.00011 | 0.00079 | 0.0014 | 0.0093 | 0.0025 | -0.01 | -0.0023 | 0.0029 | -0.0088 | 0.1 | 0.005 | -0.032 | 0.079 | 0.1 | -0.0056 | 0.0065 | 0.023 | 0.00041 | -0.0031 | -0.027 | -0.011 | -0.003 | -0.013 | 0.00014 | 0.004 | -0.00044 | 0.011 | nan | -0.0028 | 0.0019 | -0.0033 | 0.0032 | -0.02 | 0.011 | -0.00064 | -0.00048 | -0.00061 | -0.00038 | -0.0006 | -0.0047 | -0.002 | -0.001 | -0.0045 | -0.0031 | -0.0031 | -0.004 | -0.0023 | -0.003 | -0.0029 | -0.0031 | -0.0064 | -0.0035 | -0.0038 | -0.0057 | -0.004 | 1 | -0.00068 | -0.00095 | -0.024 | -0.0043 | -0.00091 | -0.012 | -0.0062 | -0.00098 | -0.033 | -0.0033 | -0.0043 | -0.0042 | -0.0019 | -0.0037 | -0.013 | -0.068 | -0.031 | -0.0078 | -0.0064 | -0.0091 | -0.0018 | -0.0065 | -3e-05 | -0.013 | 0.0028 | -0.0075 | -4.3e-05 | -0.016 | -0.0012 |
| Interaction_MYYear_17Month_1 | -0.00093 | 0.0011 | 0.029 | -0.00022 | 2.1e-05 | -0.00088 | 0.00099 | 0.0017 | -0.0004 | 0.0021 | -0.0013 | 0.0013 | -0.0016 | 0.0048 | -0.0015 | 0.0082 | 0.00067 | 0.0089 | 0.076 | -0.0013 | -0.00082 | -0.00019 | 0.077 | 0.00077 | -0.00027 | 0.0033 | 0.013 | -0.00036 | 0.013 | 0.0041 | -0.0017 | -0.0015 | 0.0041 | -0.0016 | 0.0095 | 0.0098 | 0.043 | 0.0048 | -0.0038 | 0.0095 | 0.0068 | -0.0024 | -0.0012 | 0.005 | -0.00068 | -0.00041 | 0.0014 | 0.0051 | 0.0042 | 0.0017 | -8.8e-05 | -0.00063 | 0.0047 | -0.0042 | nan | -0.00055 | 0.0069 | -0.00068 | 0.017 | -0.00097 | 0.00069 | -7.7e-05 | -5.7e-05 | -7.3e-05 | -4.5e-05 | -7.1e-05 | -0.00057 | -0.00024 | -0.00012 | -0.00054 | -0.00037 | -0.00036 | -0.00048 | -0.00028 | -0.00036 | -0.00034 | -0.00037 | -0.00076 | -0.00041 | -0.00046 | -0.00068 | -0.00048 | -0.00068 | 1 | -0.00011 | -0.0029 | -0.00052 | -0.00011 | -0.0015 | -0.00074 | -0.00012 | -0.004 | -0.00039 | -0.00052 | -0.0005 | -0.00023 | -0.00044 | -0.0016 | -0.0081 | -0.0037 | -0.00093 | -0.00077 | -0.0011 | -0.00021 | -0.00036 | -3.6e-06 | 0.0032 | -0.00096 | -0.0054 | -5.2e-06 | -0.00019 | 0.0063 |
| Interaction_MYYear_17Month_2 | 0.037 | 0.013 | 0.03 | 0.00032 | -0.0017 | 0.0024 | -0.036 | -0.0039 | 0.0001 | -2.9e-05 | -0.0028 | -0.0023 | 0.062 | 0.0026 | 0.062 | 0.036 | 0.00028 | 0.043 | 0.037 | 0.0058 | -0.015 | -0.00026 | 0.054 | -0.00075 | -0.0025 | 0.011 | -0.0015 | -0.0005 | 0.00019 | 0.001 | 5.3e-05 | 0.0037 | -0.00099 | -0.00081 | -0.00075 | -0.0012 | 0.011 | 0.0023 | 0.0032 | 0.018 | 0.011 | 2e-05 | 0.0017 | 0.0014 | -0.00071 | -0.00093 | -0.00026 | 0.00029 | -0.011 | -0.0022 | -0.00012 | -0.00063 | 0.00045 | 0.00086 | nan | -0.00029 | -0.0011 | -0.001 | -0.0017 | -0.0075 | 0.0016 | -0.00011 | -7.9e-05 | -0.0001 | -6.2e-05 | -9.9e-05 | -0.00079 | -0.00034 | -0.00017 | -0.00075 | -0.00051 | -0.00051 | -0.00067 | -0.00039 | -0.0005 | -0.00047 | -0.00052 | -0.0011 | -0.00057 | -0.00063 | -0.00095 | -0.00067 | -0.00095 | -0.00011 | 1 | -0.004 | -0.00072 | -0.00015 | -0.002 | -0.001 | -0.00016 | -0.0055 | -0.00054 | -0.00072 | -0.00069 | -0.00032 | -0.00061 | -0.0022 | -0.011 | -0.0051 | -0.0013 | -0.0011 | -0.0015 | -0.00029 | -0.00011 | -5e-06 | 0.0015 | -0.0013 | -0.0016 | -6.9e-06 | -0.0031 | -0.0024 |
| Interaction_MYYear_17Month_3 | -0.031 | 0.059 | 0.15 | -0.0062 | -0.012 | -0.00068 | 0.03 | -0.042 | 0.003 | 0.0017 | -0.0033 | -0.023 | -0.056 | -0.01 | -0.052 | 0.13 | 0.068 | 0.14 | 0.021 | -0.077 | -0.051 | -0.0066 | 0.17 | -0.025 | 0.0046 | -0.0085 | 0.0059 | -0.012 | 0.01 | -0.015 | 0.0028 | -0.036 | -0.018 | -0.021 | 0.021 | -0.024 | 0.25 | -0.01 | 0.49 | 0.16 | 0.23 | -0.011 | 0.026 | 0.15 | -0.0098 | -0.021 | 0.14 | -0.0051 | 0.068 | -0.048 | -0.0012 | 0.0088 | -0.011 | 0.07 | nan | -0.0078 | 0.014 | 0.0025 | 0.028 | -0.027 | -0.0039 | -0.0027 | -0.002 | -0.0026 | -0.0016 | -0.0025 | -0.02 | -0.0086 | -0.0043 | -0.019 | -0.013 | -0.013 | -0.017 | -0.0099 | -0.013 | -0.012 | -0.013 | -0.027 | -0.015 | -0.016 | -0.024 | -0.017 | -0.024 | -0.0029 | -0.004 | 1 | -0.018 | -0.0039 | -0.052 | -0.026 | -0.0041 | -0.14 | -0.014 | -0.018 | -0.018 | -0.0081 | -0.016 | -0.055 | -0.29 | -0.13 | -0.033 | -0.027 | -0.038 | -0.0075 | -0.022 | -0.00013 | -0.028 | 0.012 | -0.026 | -0.00018 | -0.031 | 0.028 |
| Interaction_MYYear_17Month_4 | 0.0036 | 0.041 | 0.13 | -0.0017 | -0.0013 | 0.0037 | -0.0034 | -0.0078 | 0.002 | -0.0014 | -0.0023 | 0.00035 | 0.0025 | 0.0071 | 0.0022 | 0.066 | 0.015 | 0.072 | 0.14 | -0.013 | -0.028 | -0.0012 | 0.096 | -0.0043 | 0.0014 | -0.00015 | 0.0015 | 0.00062 | 0.0014 | -0.0011 | 0.0028 | 0.0012 | -0.0033 | 0.0029 | 0.0014 | -0.0057 | 0.13 | 0.0071 | 0.018 | 0.07 | 0.14 | -0.0038 | 0.0063 | 0.02 | -0.00098 | -0.003 | 0.021 | -0.0092 | -0.0046 | -0.0056 | -0.00027 | 0.0042 | -0.0021 | 0.0094 | nan | -0.0017 | 0.0059 | -0.002 | 0.0072 | -0.017 | 0.0059 | -0.00049 | -0.00036 | -0.00046 | -0.00029 | -0.00045 | -0.0036 | -0.0015 | -0.00077 | -0.0034 | -0.0024 | -0.0023 | -0.0031 | -0.0018 | -0.0023 | -0.0022 | -0.0024 | -0.0048 | -0.0026 | -0.0029 | -0.0043 | -0.0031 | -0.0043 | -0.00052 | -0.00072 | -0.018 | 1 | -0.00069 | -0.0093 | -0.0047 | -0.00075 | -0.025 | -0.0025 | -0.0033 | -0.0032 | -0.0015 | -0.0028 | -0.01 | -0.052 | -0.023 | -0.0059 | -0.0049 | -0.0069 | -0.0013 | -0.0035 | -2.3e-05 | -0.01 | 0.0013 | -0.013 | -3.3e-05 | -0.015 | 0.0014 |
| Interaction_MYYear_17Month_5 | 0.059 | 0.0092 | 0.0066 | -0.00023 | -0.0016 | 0.0055 | -0.056 | -0.0035 | 0.00083 | 0.00041 | -0.0022 | -0.0017 | 0.052 | 0.0037 | 0.052 | 0.037 | -0.00093 | 0.043 | 0.074 | 0.01 | -0.0077 | -0.00025 | 0.077 | -0.0021 | -0.00025 | 0.015 | -0.002 | -0.00048 | -0.00064 | 0.0015 | 0.001 | 0.0046 | -0.0014 | -0.00081 | -0.0012 | -0.0013 | 0.01 | 0.004 | 0.0036 | 0.017 | 0.01 | -0.0016 | -0.00024 | 0.0016 | 0.00032 | 0.0002 | 0.00026 | -0.0011 | -0.011 | -0.0021 | -0.00012 | -3e-05 | 0.001 | -0.0024 | nan | 9.8e-05 | -0.0011 | -0.0014 | -0.002 | -0.0071 | 0.0017 | -0.0001 | -7.6e-05 | -9.7e-05 | -6e-05 | -9.5e-05 | -0.00076 | -0.00032 | -0.00016 | -0.00072 | -0.00049 | -0.00049 | -0.00064 | -0.00037 | -0.00048 | -0.00046 | -0.0005 | -0.001 | -0.00055 | -0.00061 | -0.00091 | -0.00064 | -0.00091 | -0.00011 | -0.00015 | -0.0039 | -0.00069 | 1 | -0.002 | -0.00099 | -0.00016 | -0.0053 | -0.00052 | -0.00069 | -0.00066 | -0.00031 | -0.00059 | -0.0021 | -0.011 | -0.0049 | -0.0012 | -0.001 | -0.0015 | -0.00028 | -0.0019 | -4.8e-06 | 0.00033 | -0.001 | -0.0019 | -6.8e-06 | -0.0033 | -0.0026 |
| Interaction_MYYear_17Month_6 | -0.016 | 0.028 | 0.095 | -0.015 | -0.02 | 0.00089 | 0.017 | 0.014 | -0.0091 | -0.011 | -0.032 | -0.016 | -0.024 | 0.038 | -0.023 | 0.3 | -0.059 | 0.33 | 0.11 | 0.12 | -0.046 | -0.0034 | 0.2 | 0.0077 | -0.0047 | -0.008 | 0.00011 | -0.0029 | 0.0049 | -0.0096 | -0.0027 | 0.0053 | 0.015 | 0.014 | -0.0016 | -0.008 | 0.082 | 0.038 | 0.2 | 0.35 | 0.092 | 0.07 | 0.048 | 0.0024 | -0.037 | -0.041 | -0.16 | 0.11 | 0.13 | 0.25 | -0.00044 | 0.01 | -0.01 | -0.051 | nan | 0.01 | -0.003 | -0.018 | 0.012 | -0.014 | 0.0036 | -0.0014 | -0.001 | -0.0013 | -0.00081 | -0.0013 | -0.01 | -0.0044 | -0.0022 | -0.0097 | -0.0067 | -0.0066 | -0.0087 | -0.0051 | -0.0065 | -0.0062 | -0.0067 | -0.014 | -0.0074 | -0.0082 | -0.012 | -0.0087 | -0.012 | -0.0015 | -0.002 | -0.052 | -0.0093 | -0.002 | 1 | -0.013 | -0.0021 | -0.072 | -0.007 | -0.0093 | -0.009 | -0.0041 | -0.008 | -0.028 | -0.15 | -0.067 | -0.017 | -0.014 | -0.02 | -0.0038 | 0.029 | -6.5e-05 | 0.014 | -0.01 | -0.051 | -9e-05 | -0.052 | -0.0086 |
| Interaction_MYYear_17Month_7 | -0.0017 | 0.055 | 0.18 | -0.0033 | -0.003 | 0.0043 | 0.0021 | -0.0075 | 0.0021 | -0.0027 | -0.0017 | 0.0023 | -0.0006 | 0.013 | -0.0021 | 0.09 | 0.029 | 0.099 | 0.17 | -0.026 | -0.039 | -0.0017 | 0.12 | -0.0069 | 0.0037 | -0.0028 | 0.0098 | 0.0039 | 0.0019 | -0.0047 | 0.0013 | -0.003 | -0.00022 | 0.0035 | 0.0039 | -0.0043 | 0.13 | 0.013 | 0.037 | 0.1 | 0.14 | -0.0078 | 0.011 | 0.035 | -0.0042 | -0.0058 | 0.041 | -0.016 | -0.0015 | -0.016 | 2.1e-06 | 0.0073 | -0.0047 | 0.02 | nan | -0.0028 | 0.013 | -0.0015 | 0.021 | -0.023 | 0.0073 | -0.0007 | -0.00052 | -0.00066 | -0.00041 | -0.00065 | -0.0052 | -0.0022 | -0.0011 | -0.0049 | -0.0034 | -0.0033 | -0.0044 | -0.0026 | -0.0033 | -0.0031 | -0.0034 | -0.007 | -0.0038 | -0.0042 | -0.0062 | -0.0044 | -0.0062 | -0.00074 | -0.001 | -0.026 | -0.0047 | -0.00099 | -0.013 | 1 | -0.0011 | -0.036 | -0.0035 | -0.0047 | -0.0045 | -0.0021 | -0.004 | -0.014 | -0.074 | -0.034 | -0.0085 | -0.007 | -0.0099 | -0.0019 | -0.0066 | -3.3e-05 | -0.015 | 0.0012 | -0.021 | -4.7e-05 | -0.023 | 0.0074 |
| Interaction_MYYear_17Month_8 | 0.039 | 0.011 | 0.0082 | 0.00023 | -0.0016 | 0.0043 | -0.037 | -0.0036 | 0.0021 | 0.0002 | -0.0022 | -0.0017 | 0.062 | 0.0019 | 0.062 | 0.036 | 0.00028 | 0.044 | 0.089 | 0.0072 | -0.013 | -0.00027 | 0.11 | -0.00039 | -0.0021 | 0.0094 | -0.002 | -0.00052 | -0.00015 | 0.0017 | 0.00087 | 0.0044 | -0.00099 | -0.00072 | -0.0012 | -0.0018 | 0.013 | 0.0022 | 0.0049 | 0.017 | 0.013 | 0.00089 | 0.002 | 0.0012 | -1.4e-05 | -0.00051 | 6.1e-05 | 0.00018 | -0.011 | -0.0013 | -0.00013 | -8.2e-05 | 0.0012 | 0.00018 | nan | -0.00065 | -0.00043 | -0.0013 | -0.0017 | -0.0074 | 0.0012 | -0.00011 | -8.2e-05 | -0.0001 | -6.5e-05 | -0.0001 | -0.00081 | -0.00035 | -0.00018 | -0.00078 | -0.00053 | -0.00052 | -0.00069 | -0.0004 | -0.00052 | -0.00049 | -0.00054 | -0.0011 | -0.00059 | -0.00066 | -0.00098 | -0.00069 | -0.00098 | -0.00012 | -0.00016 | -0.0041 | -0.00075 | -0.00016 | -0.0021 | -0.0011 | 1 | -0.0057 | -0.00056 | -0.00075 | -0.00072 | -0.00033 | -0.00064 | -0.0023 | -0.012 | -0.0053 | -0.0013 | -0.0011 | -0.0016 | -0.0003 | 0.00041 | -5.2e-06 | 0.00055 | -0.0014 | -0.0019 | -7.3e-06 | -0.0035 | -0.0025 |
| Interaction_MYYear_17Month_9 | -0.033 | 0.069 | 0.033 | -0.0047 | -0.016 | -0.0029 | 0.026 | -0.034 | 0.0015 | -0.0012 | -0.0081 | -0.0041 | -0.056 | 0.15 | -0.051 | 0.055 | 0.01 | 0.062 | -0.018 | -0.034 | -0.058 | -0.0091 | 0.14 | -0.014 | 0.01 | -0.011 | 0.034 | -0.016 | 0.017 | 0.0068 | -0.0052 | 0.0059 | 0.015 | 0.0073 | 0.022 | 0.0028 | 0.075 | 0.15 | 0.43 | 0.075 | 0.089 | -0.019 | -0.02 | 0.03 | -0.0016 | -0.0097 | 0.011 | 0.01 | -0.037 | -0.051 | -0.0018 | 0.028 | 0.0066 | -0.0014 | nan | -0.0044 | 0.035 | 0.0046 | 0.062 | -0.081 | 0.0037 | -0.0037 | -0.0028 | -0.0035 | -0.0022 | -0.0035 | -0.028 | -0.012 | -0.0059 | -0.026 | -0.018 | -0.018 | -0.024 | -0.014 | -0.018 | -0.017 | -0.018 | -0.037 | -0.02 | -0.022 | -0.033 | -0.023 | -0.033 | -0.004 | -0.0055 | -0.14 | -0.025 | -0.0053 | -0.072 | -0.036 | -0.0057 | 1 | -0.019 | -0.025 | -0.024 | -0.011 | -0.022 | -0.076 | -0.4 | -0.18 | -0.046 | -0.038 | -0.053 | -0.01 | -0.012 | -0.00018 | 0.0079 | -0.0072 | -0.067 | -0.00025 | -0.051 | 0.028 |
| Interaction_MYYear_17Month_10 | -0.0044 | 0.02 | 0.04 | -0.00098 | -0.0011 | 0.0013 | 0.0039 | -0.0059 | 0.0028 | -0.00053 | -0.00028 | 0.0012 | 0.014 | 0.004 | -0.0071 | 0.039 | 0.022 | 0.042 | 0.08 | -0.0074 | -0.013 | -0.00089 | 0.072 | -0.0023 | -0.0023 | -0.0017 | 0.014 | 0.057 | 0.0015 | -0.0018 | 0.0006 | 0.0033 | 6.8e-05 | -1.9e-05 | 0.0087 | 0.002 | 0.08 | 0.004 | -0.018 | 0.045 | 0.083 | 0.012 | 0.022 | 0.015 | 0.00064 | -0.0021 | 0.027 | -0.0061 | -0.0021 | -0.0083 | -0.00042 | 0.0038 | -0.0015 | 0.0081 | nan | 0.013 | 0.0061 | 0.0011 | 0.0085 | -0.015 | 0.0039 | -0.00037 | -0.00027 | -0.00035 | -0.00021 | -0.00034 | -0.0027 | -0.0012 | -0.00058 | -0.0026 | -0.0018 | -0.0017 | -0.0023 | -0.0013 | -0.0017 | -0.0016 | -0.0018 | -0.0036 | -0.002 | -0.0022 | -0.0032 | -0.0023 | -0.0033 | -0.00039 | -0.00054 | -0.014 | -0.0025 | -0.00052 | -0.007 | -0.0035 | -0.00056 | -0.019 | 1 | -0.0025 | -0.0024 | -0.0011 | -0.0021 | -0.0075 | -0.039 | -0.018 | -0.0044 | -0.0037 | -0.0052 | -0.001 | -0.0026 | -1.7e-05 | -0.006 | 0.00051 | -0.0045 | -2.5e-05 | -0.0098 | 0.0024 |
| Interaction_MYYear_17Month_11 | -0.0055 | 0.028 | 0.05 | 0.0012 | 0.0033 | 0.0056 | 0.0052 | -0.0068 | 0.0018 | -0.00082 | -0.00091 | 0.00069 | -0.0059 | 0.017 | -0.0091 | 0.052 | 0.018 | 0.057 | 0.13 | -0.013 | -0.019 | -0.0012 | 0.097 | -0.0021 | 0.0021 | -0.0022 | 0.008 | 0.0096 | 0.0012 | -0.0015 | -0.00015 | -0.0016 | 0.0002 | 0.0016 | 0.004 | -0.00038 | 0.092 | 0.017 | -0.024 | 0.061 | 0.095 | -3.7e-05 | 0.0094 | 0.023 | 0.00084 | -0.0028 | 0.02 | -0.011 | 0.00029 | -0.01 | -0.00027 | 0.0051 | -0.0022 | 0.0099 | nan | -0.00083 | 0.0079 | 1.7e-05 | 0.015 | -0.015 | 0.0042 | -0.00049 | -0.00036 | -0.00046 | -0.00029 | -0.00045 | -0.0036 | -0.0015 | -0.00077 | -0.0034 | -0.0024 | -0.0023 | -0.0031 | -0.0018 | -0.0023 | -0.0022 | -0.0024 | -0.0048 | -0.0026 | -0.0029 | -0.0043 | -0.0031 | -0.0043 | -0.00052 | -0.00072 | -0.018 | -0.0033 | -0.00069 | -0.0093 | -0.0047 | -0.00075 | -0.025 | -0.0025 | 1 | -0.0032 | -0.0015 | -0.0028 | -0.01 | -0.051 | -0.023 | -0.0059 | -0.0049 | -0.0069 | -0.0013 | -0.003 | -2.3e-05 | -0.015 | 0.0038 | -0.013 | -3.3e-05 | -0.013 | 0.0042 |
| Interaction_MYYear_17Month_12 | 0.01 | 0.021 | 0.031 | -0.0024 | -0.002 | 0.0035 | -0.0096 | -0.006 | 0.0015 | -0.0024 | 0.00019 | 0.0024 | -0.0016 | 0.012 | -0.004 | 0.054 | 0.019 | 0.059 | 0.1 | -0.013 | -0.014 | -0.0011 | 0.034 | -0.0021 | 0.0026 | 0.00076 | 0.0096 | 0.0068 | 0.0018 | -0.0036 | -0.0027 | -0.0053 | 0.0034 | 0.00075 | 0.0039 | -0.0003 | 0.046 | 0.012 | -0.022 | 0.059 | 0.047 | -0.0039 | 0.0074 | 0.025 | 0.00059 | -0.0016 | 0.021 | -0.011 | 0.005 | -0.01 | -0.00054 | 0.0039 | -0.0019 | 0.012 | nan | -0.0014 | 0.0087 | -0.00078 | 0.018 | -0.015 | 0.0021 | -0.00047 | -0.00035 | -0.00044 | -0.00027 | -0.00043 | -0.0035 | -0.0015 | -0.00074 | -0.0033 | -0.0023 | -0.0022 | -0.0029 | -0.0017 | -0.0022 | -0.0021 | -0.0023 | -0.0047 | -0.0025 | -0.0028 | -0.0042 | -0.0029 | -0.0042 | -0.0005 | -0.00069 | -0.018 | -0.0032 | -0.00066 | -0.009 | -0.0045 | -0.00072 | -0.024 | -0.0024 | -0.0032 | 1 | -0.0014 | -0.0027 | -0.0096 | -0.049 | -0.023 | -0.0057 | -0.0047 | -0.0066 | -0.0013 | -0.0012 | -2.2e-05 | -0.0073 | 0.00093 | -0.014 | -3.2e-05 | -0.014 | 0.0067 |
| Interaction_MYYear_18Month_1 | 0.084 | 0.015 | 0.015 | -0.00054 | -0.0026 | 0.0069 | -0.08 | -0.0044 | 0.0014 | 0.00038 | -0.0028 | -0.00032 | 0.087 | 0.015 | 0.082 | 0.063 | 0.0013 | 0.074 | 0.099 | 0.022 | -0.011 | -0.00052 | 0.054 | 0.0015 | -0.0012 | 0.016 | 0.002 | 0.014 | 0.0022 | 0.0041 | 0.001 | 0.0081 | 0.0036 | 4.3e-05 | 0.0015 | 0.0012 | 0.034 | 0.014 | 0.0057 | 0.035 | 0.034 | 0.0044 | 0.005 | 0.0037 | 0.0013 | 0.0013 | 0.0049 | -0.0014 | -0.014 | -0.0028 | -0.00025 | 0.0027 | 0.0035 | -0.004 | nan | 0.0016 | -0.00079 | -0.0018 | 0.00063 | -0.013 | 0.0043 | -0.00022 | -0.00016 | -0.0002 | -0.00013 | -0.0002 | -0.0016 | -0.00068 | -0.00034 | -0.0015 | -0.001 | -0.001 | -0.0014 | -0.00079 | -0.001 | -0.00096 | -0.001 | -0.0021 | -0.0012 | -0.0013 | -0.0019 | -0.0013 | -0.0019 | -0.00023 | -0.00032 | -0.0081 | -0.0015 | -0.00031 | -0.0041 | -0.0021 | -0.00033 | -0.011 | -0.0011 | -0.0015 | -0.0014 | 1 | -0.0012 | -0.0044 | -0.023 | -0.01 | -0.0026 | -0.0022 | -0.003 | -0.00059 | 0.0023 | -1e-05 | 0.0015 | -0.0022 | -0.0046 | -1.4e-05 | -0.0064 | -0.0026 |
| Interaction_MYYear_18Month_2 | -0.005 | 0.015 | 0.028 | -0.0013 | -0.002 | 9.4e-05 | 0.0043 | -0.006 | 0.0015 | -0.0011 | -0.0013 | 0.00089 | 0.032 | 0.011 | 0.027 | 0.055 | 0.018 | 0.062 | 0.1 | -0.0064 | -0.015 | -0.001 | 0.033 | 0.0014 | 0.00053 | -0.0019 | 0.014 | 0.015 | 0.0024 | -0.0029 | -0.0019 | -0.0017 | 0.0039 | 0.00013 | 0.0047 | 0.0031 | 0.07 | 0.011 | -0.015 | 0.055 | 0.072 | 0.0049 | 0.013 | 0.021 | 0.00045 | -0.0011 | 0.016 | -0.0073 | 0.0039 | -0.0076 | 0.0002 | 0.0036 | -0.0017 | 0.012 | nan | 0.0015 | 0.011 | 0.00084 | 0.019 | -0.016 | 0.004 | -0.00042 | -0.00031 | -0.00039 | -0.00024 | -0.00039 | -0.0031 | -0.0013 | -0.00066 | -0.0029 | -0.002 | -0.002 | -0.0026 | -0.0015 | -0.002 | -0.0019 | -0.002 | -0.0041 | -0.0022 | -0.0025 | -0.0037 | -0.0026 | -0.0037 | -0.00044 | -0.00061 | -0.016 | -0.0028 | -0.00059 | -0.008 | -0.004 | -0.00064 | -0.022 | -0.0021 | -0.0028 | -0.0027 | -0.0012 | 1 | -0.0085 | -0.044 | -0.02 | -0.0051 | -0.0042 | -0.0059 | -0.0011 | 0.0015 | -2e-05 | -0.0034 | -0.00041 | -0.012 | -2.8e-05 | -0.011 | 0.0068 |
| Interaction_MYYear_18Month_3 | 0.0031 | -0.0019 | -0.002 | 0.00024 | 0.003 | 0.0037 | -0.0029 | -0.023 | 0.00097 | -0.0033 | -0.0061 | 0.00097 | 0.013 | 0.046 | 0.011 | 0.17 | 0.052 | 0.19 | 0.07 | -0.044 | -0.0021 | -0.0036 | 0.095 | -0.0067 | 0.0055 | -0.0045 | 0.041 | 0.0049 | 0.016 | -0.0092 | -0.0014 | -0.014 | 0.0046 | -0.0047 | 0.009 | 0.012 | 0.043 | 0.046 | -0.067 | 0.19 | 0.049 | -0.0016 | 0.028 | 0.081 | -0.00085 | -0.012 | 0.019 | -0.015 | 0.023 | -0.031 | -0.001 | 0.01 | -0.0024 | 0.032 | nan | -0.0028 | 0.027 | -0.0033 | 0.067 | -0.023 | -0.0015 | -0.0015 | -0.0011 | -0.0014 | -0.00086 | -0.0014 | -0.011 | -0.0047 | -0.0023 | -0.01 | -0.0071 | -0.007 | -0.0093 | -0.0054 | -0.0069 | -0.0066 | -0.0072 | -0.015 | -0.0079 | -0.0088 | -0.013 | -0.0092 | -0.013 | -0.0016 | -0.0022 | -0.055 | -0.01 | -0.0021 | -0.028 | -0.014 | -0.0023 | -0.076 | -0.0075 | -0.01 | -0.0096 | -0.0044 | -0.0085 | 1 | -0.16 | -0.071 | -0.018 | -0.015 | -0.021 | -0.0041 | -0.0097 | -6.9e-05 | -0.02 | 0.0055 | -0.037 | -9.9e-05 | -0.034 | 0.022 |
| Interaction_MYYear_18Month_4 | -0.084 | -0.12 | -0.29 | 0.011 | 0.028 | -0.02 | 0.085 | 0.12 | -0.018 | 0.0097 | 0.046 | 0.036 | -0.14 | -0.074 | -0.13 | -0.59 | -0.12 | -0.63 | -0.33 | 0.058 | 0.12 | 0.023 | -0.47 | 0.018 | -0.014 | -0.0045 | -0.012 | -0.018 | -0.017 | 0.016 | -0.028 | 0.013 | 0.037 | -1.6e-06 | -0.037 | 0.069 | -0.5 | -0.074 | -0.72 | -0.58 | -0.5 | -0.038 | -0.09 | -0.18 | 0.021 | 0.052 | -0.074 | -0.013 | -0.024 | 1.6e-05 | -0.0043 | -0.043 | 0.022 | -0.1 | nan | 0.01 | -0.042 | 0.017 | -0.064 | 0.14 | -0.035 | -0.0076 | -0.0057 | -0.0072 | -0.0045 | -0.0071 | -0.056 | -0.024 | -0.012 | -0.054 | -0.037 | -0.036 | -0.048 | -0.028 | -0.036 | -0.034 | -0.037 | -0.076 | -0.041 | -0.045 | -0.068 | -0.048 | -0.068 | -0.0081 | -0.011 | -0.29 | -0.052 | -0.011 | -0.15 | -0.074 | -0.012 | -0.4 | -0.039 | -0.051 | -0.049 | -0.023 | -0.044 | -0.16 | 1 | -0.37 | -0.093 | -0.076 | -0.11 | -0.021 | 0.0026 | 0.00045 | 0.052 | -0.013 | 0.11 | 0.00063 | 0.15 | -0.028 |
| Interaction_MYYear_18Month_5 | -0.022 | -0.041 | -0.11 | 0.01 | 0.012 | 0.013 | 0.021 | -0.062 | 0.015 | -0.0017 | -0.0019 | -0.0046 | -0.038 | -0.13 | -0.032 | -0.037 | 0.088 | -0.041 | -0.12 | -0.087 | 0.023 | -0.0084 | -0.093 | 0.013 | 0.011 | -0.0072 | -0.065 | -0.016 | -0.024 | -0.016 | 0.033 | -0.027 | -0.08 | 0.015 | -0.00032 | -0.086 | -0.0051 | -0.13 | -0.00087 | -0.032 | 0.0042 | 0.0082 | 0.047 | 0.0092 | 0.023 | 0.0009 | 0.1 | -0.065 | -0.014 | -0.054 | -0.0014 | 0.0025 | -0.029 | 0.099 | nan | -0.018 | -0.0082 | -0.0037 | -0.05 | 0.0034 | 0.015 | -0.0035 | -0.0026 | -0.0033 | -0.002 | -0.0032 | -0.026 | -0.011 | -0.0055 | -0.024 | -0.017 | -0.017 | -0.022 | -0.013 | -0.016 | -0.015 | -0.017 | -0.035 | -0.019 | -0.021 | -0.031 | -0.022 | -0.031 | -0.0037 | -0.0051 | -0.13 | -0.023 | -0.0049 | -0.067 | -0.034 | -0.0053 | -0.18 | -0.018 | -0.023 | -0.023 | -0.01 | -0.02 | -0.071 | -0.37 | 1 | -0.042 | -0.035 | -0.049 | -0.0096 | 0.025 | -0.00016 | -0.065 | 0.032 | 0.0094 | -0.00023 | -0.028 | -0.0017 |
| Interaction_MYYear_18Month_6 | 0.25 | -0.042 | 0.043 | -0.0025 | -0.013 | 0.014 | -0.24 | -0.03 | 0.012 | -0.0004 | -0.019 | -0.009 | 0.48 | 0.013 | 0.46 | 0.29 | 0.011 | 0.34 | 0.085 | 0.074 | 0.0018 | -0.0019 | 0.1 | 0.0032 | -0.0077 | 0.043 | 0.0048 | 0.049 | 0.0047 | 0.015 | 0.0055 | 0.036 | -0.0022 | -0.0014 | 0.0053 | 0.0029 | 0.12 | 0.014 | 0.047 | 0.15 | 0.12 | 0.03 | 0.031 | 0.003 | 0.0017 | 0.0026 | 0.0095 | -0.0052 | -0.081 | 0.025 | 0.013 | 0.0042 | 0.011 | 0.00058 | nan | 0.0062 | 0.00049 | -0.0061 | -0.01 | -0.054 | 0.023 | -0.00088 | -0.00065 | -0.00083 | -0.00051 | -0.00081 | -0.0065 | -0.0028 | -0.0014 | -0.0062 | -0.0042 | -0.0042 | -0.0055 | -0.0032 | -0.0041 | -0.0039 | -0.0043 | -0.0087 | -0.0047 | -0.0052 | -0.0078 | -0.0055 | -0.0078 | -0.00093 | -0.0013 | -0.033 | -0.0059 | -0.0012 | -0.017 | -0.0085 | -0.0013 | -0.046 | -0.0044 | -0.0059 | -0.0057 | -0.0026 | -0.0051 | -0.018 | -0.093 | -0.042 | 1 | -0.0088 | -0.012 | -0.0024 | 0.0062 | -4.1e-05 | 0.0095 | -0.011 | -0.0045 | -5.8e-05 | -0.019 | -0.02 |
| Interaction_MYYear_18Month_7 | -0.0086 | -0.011 | 0.028 | 0.0015 | -0.011 | 0.0054 | 0.003 | -0.023 | 0.0038 | 0.00047 | -0.012 | -0.0073 | 0.49 | -0.012 | 0.46 | 0.22 | 0.027 | 0.27 | 0.1 | 0.0068 | 0.0005 | -0.0017 | 0.018 | 0.012 | -0.0098 | 0.0017 | 0.0083 | 0.073 | 0.00049 | 0.0046 | 0.0055 | 0.029 | -0.015 | -0.0016 | 0.0063 | -0.0025 | 0.092 | -0.01 | 0.03 | 0.11 | 0.09 | 0.038 | 0.049 | 0.0047 | 0.0036 | -0.0015 | 0.014 | -0.00094 | -0.068 | -0.00051 | 0.0029 | -0.00016 | 0.0032 | 0.032 | nan | 0.0096 | 0.005 | -0.0055 | -0.0096 | -0.046 | 0.0077 | -0.00072 | -0.00054 | -0.00069 | -0.00042 | -0.00067 | -0.0053 | -0.0023 | -0.0012 | -0.0051 | -0.0035 | -0.0034 | -0.0046 | -0.0026 | -0.0034 | -0.0032 | -0.0035 | -0.0072 | -0.0039 | -0.0043 | -0.0064 | -0.0045 | -0.0064 | -0.00077 | -0.0011 | -0.027 | -0.0049 | -0.001 | -0.014 | -0.007 | -0.0011 | -0.038 | -0.0037 | -0.0049 | -0.0047 | -0.0022 | -0.0042 | -0.015 | -0.076 | -0.035 | -0.0088 | 1 | -0.01 | -0.002 | 0.016 | -3.4e-05 | 0.0018 | -0.0091 | 0.0015 | -4.8e-05 | -0.015 | -0.011 |
| Interaction_MYYear_18Month_8 | 0.32 | 0.011 | 0.0096 | -0.0018 | -0.0052 | 0.012 | -0.3 | -0.028 | 0.013 | 0.0018 | -0.019 | -0.014 | 0.21 | 0.023 | 0.18 | 0.25 | -0.0077 | 0.22 | 0.21 | 0.13 | 0.02 | -0.0016 | 0.16 | 0.0028 | -0.008 | 0.066 | 0.015 | 0.077 | 0.013 | 0.021 | 0.0048 | 0.036 | 0.024 | -0.0044 | 0.0071 | 0.025 | 0.19 | 0.023 | 0.085 | 0.15 | 0.18 | 0.062 | 0.031 | 0.0093 | 0.0036 | 0.0024 | 0.00057 | -0.0034 | -0.041 | 0.086 | 0.017 | 0.0047 | 0.019 | -0.033 | nan | 0.017 | -0.0097 | -0.0066 | -0.013 | -0.016 | 0.03 | -0.001 | -0.00076 | -0.00097 | -0.0006 | -0.00095 | -0.0075 | -0.0032 | -0.0016 | -0.0072 | -0.0049 | -0.0049 | -0.0064 | -0.0037 | -0.0048 | -0.0045 | -0.005 | -0.01 | -0.0055 | -0.0061 | -0.0091 | -0.0064 | -0.0091 | -0.0011 | -0.0015 | -0.038 | -0.0069 | -0.0015 | -0.02 | -0.0099 | -0.0016 | -0.053 | -0.0052 | -0.0069 | -0.0066 | -0.003 | -0.0059 | -0.021 | -0.11 | -0.049 | -0.012 | -0.01 | 1 | -0.0028 | -0.012 | -4.8e-05 | 0.018 | -0.0086 | 0.0037 | -6.8e-05 | -0.0012 | -0.022 |
| Interaction_MYYear_18Month_9 | -0.0024 | 0.012 | 0.026 | -0.00016 | 0.00029 | -0.00041 | 0.0026 | -0.0094 | 0.0044 | 0.00013 | -0.0096 | -0.0097 | -0.0042 | -0.0023 | -0.0039 | 0.15 | -0.017 | 0.061 | 0.14 | 0.042 | 0.0052 | 0.00051 | 0.16 | 0.0012 | -0.00058 | 0.0028 | 0.0014 | -0.00094 | 0.00092 | 0.0028 | 0.0017 | 0.007 | 0.01 | 0.0026 | -0.00096 | 0.0062 | 0.11 | -0.0024 | 0.067 | 0.14 | 0.024 | 0.029 | 0.0015 | -0.021 | -0.00025 | -0.0017 | -0.0071 | 0.00096 | -0.0012 | 0.17 | 0.021 | -0.0027 | 0.0049 | -0.011 | nan | 0.0077 | -0.00089 | 0.0017 | -0.0016 | 0.011 | 0.01 | -0.0002 | -0.00015 | -0.00019 | -0.00012 | -0.00018 | -0.0015 | -0.00063 | -0.00032 | -0.0014 | -0.00096 | -0.00095 | -0.0013 | -0.00073 | -0.00093 | -0.00089 | -0.00097 | -0.002 | -0.0011 | -0.0012 | -0.0018 | -0.0012 | -0.0018 | -0.00021 | -0.00029 | -0.0075 | -0.0013 | -0.00028 | -0.0038 | -0.0019 | -0.0003 | -0.01 | -0.001 | -0.0013 | -0.0013 | -0.00059 | -0.0011 | -0.0041 | -0.021 | -0.0096 | -0.0024 | -0.002 | -0.0028 | 1 | -0.0015 | -9.4e-06 | 0.0063 | -0.0019 | 0.01 | -1.3e-05 | 0.012 | -0.00097 |
| Device_PossibleOwnership | -0.031 | -0.021 | -0.0028 | 0.016 | 0.022 | -0.0073 | 0.03 | 0.021 | -0.026 | 0.001 | 0.019 | 0.022 | 0.023 | -0.053 | -0.00044 | -0.0063 | -0.026 | -0.0055 | -0.0096 | 0.05 | -0.01 | -0.0049 | -0.015 | 0.8 | -0.03 | -0.004 | -0.022 | 0.066 | -0.015 | -0.017 | -0.0076 | -0.014 | -0.033 | 0.037 | -0.0021 | -0.028 | -0.0057 | -0.052 | -0.012 | 0.00073 | -0.0054 | 0.072 | 0.029 | -0.094 | 0.0013 | -0.014 | 0.022 | -0.013 | 0.049 | 0.015 | 0.0029 | -0.0063 | -0.017 | -0.0076 | nan | -0.0019 | -0.014 | -0.012 | -0.041 | -0.011 | -0.073 | -0.0017 | -0.0014 | -0.00074 | -0.00033 | -0.0014 | 0.012 | 0.0064 | 0.0012 | -0.00048 | 0.0048 | -0.0031 | -0.0019 | -0.0028 | -0.0062 | -0.0039 | -0.0042 | -0.001 | -0.0039 | -0.0044 | -0.0082 | -0.0025 | -0.0065 | -0.00036 | -0.00011 | -0.022 | -0.0035 | -0.0019 | 0.029 | -0.0066 | 0.00041 | -0.012 | -0.0026 | -0.003 | -0.0012 | 0.0023 | 0.0015 | -0.0097 | 0.0026 | 0.025 | 0.0062 | 0.016 | -0.012 | -0.0015 | 1 | -0.00017 | 0.03 | -0.023 | 0.021 | 0.00053 | -0.017 | -0.018 |
| AV_highrisk | -4.1e-05 | -2.9e-05 | -0.00023 | 0.0048 | -5.3e-05 | 0.0067 | 4.4e-05 | -0.00024 | 0.00046 | 0.00023 | -0.0003 | -0.00011 | -7.2e-05 | -0.00013 | -6.7e-05 | -0.00027 | 0.0003 | -0.0003 | -0.00016 | -0.00026 | 9.6e-05 | -8.3e-06 | -0.00022 | -0.00014 | 5.9e-05 | -3.1e-05 | -0.00022 | -1.6e-05 | -5.7e-05 | -5.4e-05 | -0.00015 | -0.00023 | -0.00025 | -0.00012 | -0.00012 | -0.00022 | -0.00029 | -0.00013 | -0.00033 | -0.00028 | -0.0003 | -0.00033 | -7.5e-06 | 0.00067 | -0.00017 | 0.0003 | -2.1e-05 | -0.00014 | -0.00036 | -9.1e-05 | -3.9e-06 | -0.00017 | 7.7e-05 | 0.00041 | nan | -3.4e-05 | -0.00015 | -8e-05 | -9.8e-05 | -0.00025 | -0.00045 | -3.4e-06 | -2.5e-06 | -3.2e-06 | -2e-06 | -3.2e-06 | -2.5e-05 | -1.1e-05 | -5.4e-06 | -2.4e-05 | -1.6e-05 | -1.6e-05 | -2.1e-05 | -1.2e-05 | -1.6e-05 | -1.5e-05 | -1.7e-05 | -3.4e-05 | -1.8e-05 | -2e-05 | -3e-05 | -2.1e-05 | -3e-05 | -3.6e-06 | -5e-06 | -0.00013 | -2.3e-05 | -4.8e-06 | -6.5e-05 | -3.3e-05 | -5.2e-06 | -0.00018 | -1.7e-05 | -2.3e-05 | -2.2e-05 | -1e-05 | -2e-05 | -6.9e-05 | 0.00045 | -0.00016 | -4.1e-05 | -3.4e-05 | -4.8e-05 | -9.4e-06 | -0.00017 | 1 | -0.00061 | -5.4e-05 | -0.00026 | -2.1e-07 | -0.00019 | -0.00015 |
| AV_mediumrisk | 0.022 | -0.09 | -0.099 | -0.14 | -0.2 | -0.17 | -0.026 | -0.048 | 0.0039 | 0.012 | -0.066 | -0.036 | 0.013 | 0.05 | 0.0031 | -0.0042 | -0.17 | -0.0096 | 0.0069 | 0.16 | 0.047 | 0.013 | 0.0015 | 0.027 | 0.0031 | 0.0082 | 0.087 | 0.026 | 0.056 | 0.043 | -0.023 | 0.071 | 0.11 | -0.027 | 0.036 | 0.11 | -0.02 | 0.05 | -0.026 | -0.012 | -0.028 | 0.011 | -0.084 | 0.013 | -0.022 | 0.0091 | -0.13 | 0.12 | -0.12 | 0.036 | 0.003 | -0.002 | 0.068 | -0.21 | nan | 0.05 | 0.011 | 0.029 | 0.07 | -0.018 | 0.024 | 0.0033 | 0.0019 | 0.0033 | 0.001 | 0.0015 | 0.021 | 0.0042 | -0.00011 | 0.02 | 0.0037 | 0.013 | 0.0022 | -0.0024 | 0.0014 | 0.00084 | 0.011 | 0.02 | -0.005 | -0.012 | -0.01 | 0.00099 | -0.013 | 0.0032 | 0.0015 | -0.028 | -0.01 | 0.00033 | 0.014 | -0.015 | 0.00055 | 0.0079 | -0.006 | -0.015 | -0.0073 | 0.0015 | -0.0034 | -0.02 | 0.052 | -0.065 | 0.0095 | 0.0018 | 0.018 | 0.0063 | 0.03 | -0.00061 | 1 | -0.21 | -0.0037 | -0.00024 | 0.055 | 0.026 |
| AV_lowrisk | -0.01 | 0.03 | 0.026 | 0.084 | 0.044 | 0.25 | 0.011 | 0.011 | 0.0023 | -0.0042 | 0.017 | 0.0064 | -0.02 | -0.021 | -0.018 | -0.013 | 0.064 | -0.014 | -0.014 | -0.057 | -0.038 | -0.0026 | -0.011 | -0.017 | 0.0064 | -0.0033 | -0.03 | -0.0054 | -0.017 | -0.017 | 0.012 | -0.026 | -0.039 | 0.0043 | -0.0098 | -0.039 | -0.0023 | -0.021 | 0.0022 | -0.0071 | -1.2e-06 | 0.001 | 0.036 | 0.00018 | 0.0084 | 0.0016 | 0.044 | -0.034 | 0.058 | -0.012 | -0.00083 | -0.00044 | -0.022 | 0.08 | nan | -0.011 | -0.0087 | -0.0092 | -0.021 | 0.022 | -0.0076 | -0.0012 | -0.00086 | -0.0011 | -0.00068 | -0.0011 | -0.0074 | -0.002 | -0.001 | -0.0061 | -0.0036 | -0.0038 | -0.0028 | -0.0021 | -0.0025 | -0.0024 | -0.0036 | -0.0061 | -0.0013 | -0.00032 | 0.00058 | -0.0011 | 0.0028 | -0.00096 | -0.0013 | 0.012 | 0.0013 | -0.001 | -0.01 | 0.0012 | -0.0014 | -0.0072 | 0.00051 | 0.0038 | 0.00093 | -0.0022 | -0.00041 | 0.0055 | -0.013 | 0.032 | -0.011 | -0.0091 | -0.0086 | -0.0019 | -0.023 | -5.4e-05 | -0.21 | 1 | 0.0057 | -7.8e-05 | -0.017 | -0.0069 |
| Interaction_01 | -0.014 | -0.036 | -0.064 | 0.011 | 0.017 | 0.0027 | 0.012 | 0.019 | 0.026 | -0.00064 | 0.027 | 0.019 | 0.0079 | -0.19 | -0.015 | -0.091 | -0.053 | -0.1 | -0.085 | 0.056 | 0.037 | 0.0015 | -0.085 | 0.033 | -0.014 | 0.0034 | -0.11 | 0.062 | -0.051 | 0.0036 | 0.14 | 0.1 | 0.042 | 0.015 | -0.036 | -0.034 | -0.07 | -0.19 | -0.11 | -0.097 | -0.073 | -0.0097 | -0.031 | -0.053 | 0.053 | 0.038 | 0.01 | -0.0025 | -0.016 | 0.00046 | 0.003 | -0.027 | -0.0047 | -0.0091 | nan | 0.08 | -0.07 | -0.012 | -0.2 | 0.067 | 0.034 | -0.0014 | -0.00074 | -0.0015 | -0.00067 | -0.002 | -0.015 | -0.0085 | -0.0064 | -0.0099 | -0.017 | -0.0073 | -0.012 | -0.0096 | -0.0089 | -0.0062 | -0.0058 | -0.0082 | -0.011 | -0.009 | -0.023 | -0.0085 | -0.0075 | -0.0054 | -0.0016 | -0.026 | -0.013 | -0.0019 | -0.051 | -0.021 | -0.0019 | -0.067 | -0.0045 | -0.013 | -0.014 | -0.0046 | -0.012 | -0.037 | 0.11 | 0.0094 | -0.0045 | 0.0015 | 0.0037 | 0.01 | 0.021 | -0.00026 | -0.0037 | 0.0057 | 1 | 0.0003 | 0.16 | -0.063 |
| Interaction_02 | -5.9e-05 | -4.1e-05 | -8.3e-05 | -8.1e-05 | -7.5e-05 | -6.5e-05 | 6.3e-05 | 5.4e-05 | 0.00033 | -0.00015 | -0.00045 | -0.00042 | 0.0016 | -0.00018 | 0.0016 | 0.00051 | 0.00042 | 0.00072 | -8.8e-05 | -0.00037 | 0.00013 | -1.2e-05 | 4.4e-06 | 0.0002 | 8.1e-05 | -4.3e-05 | -0.00031 | -2.2e-05 | -9.5e-05 | -0.00014 | 0.00067 | 0.00034 | -0.00036 | 0.0009 | -0.00017 | -0.00031 | 0.00022 | -0.00018 | -0.00012 | 0.00027 | 0.00024 | -0.00046 | -1e-05 | -0.0003 | -0.00049 | -0.00042 | -3.3e-05 | -0.00019 | -0.00051 | -0.00013 | -5.5e-06 | 0.00014 | -0.00024 | 9.3e-06 | nan | -5e-05 | -0.00022 | -0.00012 | -0.00015 | -0.00035 | 0.00033 | -4.8e-06 | -3.4e-06 | -4.5e-06 | -2.8e-06 | -4.4e-06 | -3.4e-05 | -1.5e-05 | -7.6e-06 | -3.3e-05 | -2.3e-05 | -2.3e-05 | -3e-05 | -1.8e-05 | -2.3e-05 | -2.1e-05 | -2.3e-05 | -4.7e-05 | -2.6e-05 | -2.9e-05 | -4.3e-05 | -3e-05 | -4.3e-05 | -5.2e-06 | -6.9e-06 | -0.00018 | -3.3e-05 | -6.8e-06 | -9e-05 | -4.7e-05 | -7.3e-06 | -0.00025 | -2.5e-05 | -3.3e-05 | -3.2e-05 | -1.4e-05 | -2.8e-05 | -9.9e-05 | 0.00063 | -0.00023 | -5.8e-05 | -4.8e-05 | -6.8e-05 | -1.3e-05 | 0.00053 | -2.1e-07 | -0.00024 | -7.8e-05 | 0.0003 | 1 | -0.00027 | -0.00021 |
| Interaction_03 | -0.027 | -0.045 | -0.083 | 0.005 | 0.018 | -0.0043 | 0.025 | -0.015 | 0.015 | 0.023 | -0.003 | -0.016 | -0.036 | -0.11 | -0.037 | -0.11 | -0.099 | -0.13 | -0.086 | 0.071 | 0.052 | 0.0028 | -0.094 | 0.0052 | -0.007 | 0.0054 | 0.2 | 0.0013 | 0.015 | 0.025 | -0.057 | 0.04 | 0.18 | -0.013 | 0.088 | 0.2 | -0.083 | -0.11 | -0.11 | -0.11 | -0.085 | -0.026 | -0.057 | -0.03 | 0.028 | 0.041 | 0.0083 | -0.031 | 0.08 | 0.01 | 0.0028 | -0.016 | 0.062 | 0.033 | nan | 0.04 | 0.18 | 0.3 | 0.16 | 0.08 | 0.1 | -0.00016 | -0.00046 | -0.0021 | -0.00055 | -0.0028 | -0.022 | -0.0093 | -0.0049 | -0.016 | -0.015 | -0.0094 | -0.014 | -0.009 | -0.012 | -0.011 | -0.013 | -0.018 | -0.014 | -0.016 | -0.021 | -0.011 | -0.016 | -0.00019 | -0.0031 | -0.031 | -0.015 | -0.0033 | -0.052 | -0.023 | -0.0035 | -0.051 | -0.0098 | -0.013 | -0.014 | -0.0064 | -0.011 | -0.034 | 0.15 | -0.028 | -0.019 | -0.015 | -0.0012 | 0.012 | -0.017 | -0.00019 | 0.055 | -0.017 | 0.16 | -0.00027 | 1 | 0.56 |
| PPI | -0.04 | -0.0067 | -0.011 | -0.0045 | 0.0033 | -0.011 | 0.039 | -0.013 | -0.02 | 0.015 | 0.0077 | -0.02 | -0.033 | 0.0088 | -0.027 | -0.0029 | 0.074 | -0.0054 | 0.0056 | -0.076 | 0.0043 | 0.019 | 0.0034 | -0.017 | 0.03 | -0.017 | 0.28 | -0.019 | 0.04 | -0.014 | -0.14 | -0.1 | 0.11 | -0.055 | 0.22 | 0.011 | 0.026 | 0.0089 | 0.035 | 0.011 | 0.029 | -0.02 | 0.036 | 0.072 | -0.018 | -0.016 | 0.034 | -0.066 | 0.2 | 0.0064 | 0.00015 | 0.052 | 0.058 | 0.22 | nan | -0.0036 | 0.45 | 0.44 | 0.47 | -0.035 | 0.046 | 0.00089 | 0.00046 | -0.0001 | 0.00027 | -0.0021 | -0.014 | -0.0021 | 0.0022 | -0.0092 | -0.0002 | -0.0043 | -0.0016 | 0.00083 | -0.0037 | -0.0011 | -0.006 | -0.0073 | 0.0019 | -0.0013 | 0.0076 | -0.0009 | -0.0012 | 0.0063 | -0.0024 | 0.028 | 0.0014 | -0.0026 | -0.0086 | 0.0074 | -0.0025 | 0.028 | 0.0024 | 0.0042 | 0.0067 | -0.0026 | 0.0068 | 0.022 | -0.028 | -0.0017 | -0.02 | -0.011 | -0.022 | -0.00097 | -0.018 | -0.00015 | 0.026 | -0.0069 | -0.063 | -0.00021 | 0.56 | 1 |
# Earlier drop experiment (disabled). NOTE(review): the original commented code
# was missing a comma after 'AV_update_status', which would have silently
# concatenated it with 'Interaction_P_OsVer_01' if re-enabled; fixed here.
#X.drop(columns=['HasTpm','AV_mediumrisk','AV_update_status','Interaction_P_OsVer_01','Interaction_P_OsVer_02','Interaction_P_OsVer_03'],inplace=True)
#df_test.drop(columns=['HasTpm','AV_mediumrisk','AV_update_status','Interaction_P_OsVer_01','Interaction_P_OsVer_02','Interaction_P_OsVer_03'],inplace=True)
# Peek at the two OS-build features (already numeric/scaled) before dropping
# them in the next cell.
X[['Census_OSBuildRevision','Census_OSSkuName']].head()
| Census_OSBuildRevision | Census_OSSkuName | |
|---|---|---|
| 0 | 0.032027 | -0.018806 |
| 1 | -0.610708 | 0.797394 |
| 2 | -0.707118 | -0.018806 |
| 3 | -0.674982 | 2.429792 |
| 4 | -0.578571 | -0.018806 |
# Earlier, broader drop experiments kept for reference:
#   OsSuite, OsBuild, OsVer, Census_OSArchitecture, Census_OSBuildRevision, Census_OSSkuName
# Final decision: remove only the two redundant OS-build features from both
# the training matrix and the competition test set.
_os_build_dupes = ['Census_OSBuildRevision', 'Census_OSSkuName']
for _frame in (df_test, X):
    _frame.drop(columns=_os_build_dupes, inplace=True)
%%time
for col in X.columns:
if X[col].nunique()==1:
print(col)
X.drop(columns=[col],inplace=True)
df_test.drop(columns=[col],inplace=True)
else:
continue
Census_IsWIMBootEnabled Wall time: 39.5 s
%%time
fig,ax=plt.subplots(figsize=(12,12))
corr=X.corr()
# plot the heatmap
sns.heatmap(corr,xticklabels=corr.columns,yticklabels=corr.columns,ax=ax)
Wall time: 3min 6s
%%time
# Diverging palette for the styled correlation table: hue 5 (red) for one
# extreme, hue 250 (blue) for the other.
cmap=sns.diverging_palette(5, 250, as_cmap=True)
def magnify():
    """Return CSS table styles for a pandas Styler: compact cells by default,
    enlarged text on hover so individual correlations can be read."""
    shrink_headers = dict(selector="th", props=[("font-size", "7pt")])
    tight_cells = dict(selector="td", props=[('padding', "0em 0em")])
    hover_header = dict(selector="th:hover", props=[("font-size", "12pt")])
    hover_cell = dict(selector="tr:hover td:hover",
                      props=[('max-width', '200px'), ('font-size', '12pt')])
    return [shrink_headers, tight_cells, hover_header, hover_cell]
# Styled correlation matrix: row-wise diverging background gradient, compact
# cells, hover-to-zoom via magnify().
# NOTE(review): Styler.set_precision is deprecated since pandas 1.3 (removed
# in 2.0) — migrate to .format(precision=2) when the environment is upgraded.
# The caption typo "magify" is runtime output text, left unchanged here.
corr.style.background_gradient(cmap, axis=1)\
.set_properties(**{'max-width': '80px', 'font-size': '10pt'})\
.set_caption("Hover to magify")\
.set_precision(2)\
.set_table_styles(magnify())
Wall time: 40.9 ms
%%time
#X.drop(columns=['HasTpm','IeVerIdentifier','AV_update_status','Interaction_03','LocaleEnglishNameIdentifier','OsPlatformSubRelease','IsProtected','Census_OSVersion','Census_MDC2FormFactor','Device_PossibleOwnership','Census_OSBuildNumber','Platform','Census_OSInstallLanguageIdentifier'],inplace=True)
#df_test.drop(columns=['HasTpm','IeVerIdentifier','AV_update_status','Interaction_03','LocaleEnglishNameIdentifier','OsPlatformSubRelease','IsProtected','Census_OSVersion','Census_MDC2FormFactor','Device_PossibleOwnership','Census_OSBuildNumber','Platform','Census_OSInstallLanguageIdentifier'],inplace=True)
Wall time: 0 ns
#X.drop(columns=['IeVerIdentifier_encode','Census_MDC2FormFactor_encode'],inplace=True)
#df_test.drop(columns=['IeVerIdentifier_encode','Census_MDC2FormFactor_encode'],inplace=True)
X.shape
(6244507, 109)
from sklearn.model_selection import train_test_split
# Split on the DataFrame/Series directly rather than .values: sklearn accepts
# pandas objects, and keeping column labels is required by later cells that
# index by feature name (X_train.columns, X_train[important_columns]) — with
# .values those raise AttributeError/IndexError on bare ndarrays.
# The split itself is unchanged (same random_state, same row order).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
# Carve a validation set (20% of the training portion) out of the train split.
X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, test_size=0.2, random_state=1)
from sklearn.metrics import accuracy_score,precision_score,recall_score,confusion_matrix,roc_auc_score,roc_curve
from sklearn.tree import DecisionTreeClassifier
# BUG fix: KNeighborsClassifier and BaggingClassifier are used below but were
# never imported, so this cell raised NameError as written.
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import BaggingClassifier

# Weak learner 1: shallow decision tree with entropy splits.
clf1 = DecisionTreeClassifier(criterion='entropy', max_depth=5)
# Weak learner 2: 5-nearest-neighbours classifier.
clf2 = KNeighborsClassifier(n_neighbors=5)
# Bag 10 copies of each learner, each trained on 80% of rows and 90% of
# features. (base_estimator= matches the old sklearn API used in this file.)
bagging1 = BaggingClassifier(base_estimator=clf1, n_estimators=10, max_samples=0.8, max_features=0.9)
bagging2 = BaggingClassifier(base_estimator=clf2, n_estimators=10, max_samples=0.8, max_features=0.9)
import itertools
%%time
# Train the tree-based bagging ensemble on the training split (~12 min on 6M rows).
bagging1.fit(X_train,y_train)
Wall time: 12min 47s
BaggingClassifier(base_estimator=DecisionTreeClassifier(class_weight=None, criterion='entropy', max_depth=5,
max_features=None, max_leaf_nodes=None,
min_impurity_decrease=0.0, min_impurity_split=None,
min_samples_leaf=1, min_samples_split=2,
min_weight_fraction_leaf=0.0, presort=False, random_state=None,
splitter='best'),
bootstrap=True, bootstrap_features=False, max_features=0.9,
max_samples=0.8, n_estimators=10, n_jobs=1, oob_score=False,
random_state=None, verbose=0, warm_start=False)
# Hold-out evaluation of the bagged decision trees.
predictions = bagging1.predict(X_test)
# Column 1 of predict_proba is P(class == 1); kept as scores for ROC-AUC.
prediction_scores = bagging1.predict_proba(X_test)[:, 1]

cm = confusion_matrix(y_test, predictions)
print("Confusion Matrix for the Model : " + "\n", cm)
print("Accuracy of the model : ", accuracy_score(y_test, predictions))
print("Precision of the Model : ", precision_score(y_test, predictions))
print("Recall score of the Model : ", recall_score(y_test, predictions))
print("Area under ROC curve for the Model : ", roc_auc_score(y_test, prediction_scores))
Confusion Matrix for the Model : [[459220 478785] [245312 690036]] Accuracy of the model : 0.6134754101335946 Precision of the Model : 0.5903692695459783 Recall score of the Model : 0.7377318388450074 Area under ROC curve for the Model : 0.6683681213065038
%%time
# NOTE(review): the bagging2 (KNN) experiment is disabled below; the metric
# prints therefore re-report bagging1's predictions from the previous cell,
# not a new model. Confirm before comparing these numbers against bagging2.
#bagging2.fit(X_train,y_train)
#predictions=bagging2.predict(X_test)
#prediction_scores=bagging2.predict_proba(X_test)[:,1]
print("Confusion Matrix for the Model : "+"\n",confusion_matrix(y_test,predictions))
print("Accuracy of the model : ",accuracy_score(y_test,predictions))
print("Precision of the Model : ",precision_score(y_test,predictions))
print("Recall score of the Model : ",recall_score(y_test,predictions))
print("Area under ROC curve for the Model : ",roc_auc_score(y_test,prediction_scores))
from lightgbm import LGBMClassifier

# Baseline LightGBM: 100 boosting rounds, wide trees (500 leaves), mild
# row/column subsampling; random_state pinned for reproducibility.
_lgbm_config = dict(
    boosting_type='gbdt', class_weight=None, colsample_bytree=0.9,
    importance_type='gain', learning_rate=0.15, max_depth=-1,
    min_child_samples=50, min_child_weight=0.001, min_split_gain=0.0,
    n_estimators=100, n_jobs=-1, num_leaves=500, objective=None,
    random_state=42, reg_alpha=0, reg_lambda=0.0, silent=False,
    subsample=0.9, subsample_freq=1,
)
clf = LGBMClassifier(**_lgbm_config)
%%time
# Train the baseline LightGBM model (~2 min on the full training split).
clf.fit(X_train,y_train)
Wall time: 1min 55s
LGBMClassifier(boosting_type='gbdt', class_weight=None, colsample_bytree=0.9,
importance_type='gain', learning_rate=0.15, max_depth=-1,
min_child_samples=50, min_child_weight=0.001, min_split_gain=0.0,
n_estimators=100, n_jobs=-1, num_leaves=500, objective=None,
random_state=42, reg_alpha=0, reg_lambda=0.0, silent=False,
subsample=0.9, subsample_for_bin=200000, subsample_freq=1)
# Hold-out predictions for the baseline LightGBM model.
predictions=clf.predict(X_test)
prediction_scores=clf.predict_proba(X_test)
# predict_proba returns an (n_samples, 2) ndarray; column 1 is P(class == 1).
# Vectorized slice replaces the per-row Python append loop; .tolist() keeps
# `scores` a plain list as before.
scores = prediction_scores[:, 1].tolist()
C:\Users\gandh\Anaconda3\lib\site-packages\sklearn\preprocessing\label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty. if diff:
# Hold-out metrics for the LightGBM model (scores built in the previous cell).
print("Confusion Matrix for the Model : "+"\n",confusion_matrix(y_test,predictions))
print("Accuracy of the model : ",accuracy_score(y_test,predictions))
print("Precision of the Model : ",precision_score(y_test,predictions))
print("Recall score of the Model : ",recall_score(y_test,predictions))
print("Area under ROC curve for the Model : ",roc_auc_score(y_test,scores))
Confusion Matrix for the Model : [[447455 221472] [231852 437330]] Accuracy of the model : 0.6612204237472433 Precision of the Model : 0.6638261571762077 Recall score of the Model : 0.6535292341993658 Area under ROC curve for the Model : 0.7264697348603679
# Score the competition test set, selecting columns in the exact order used
# for training (df_test may have extra/other-ordered columns).
prediction_scores=clf.predict_proba(df_test[X.columns])
# Column 1 is P(HasDetections == 1); vectorized slice replaces the append loop.
scores = prediction_scores[:, 1].tolist()
%%time
# Attach the predicted probabilities and write the Kaggle submission file.
# Assumes `solution` rows are aligned with df_test's row order — TODO confirm.
solution['HasDetections']=scores
solution.to_csv('submit_31st.csv',index=False)
Wall time: 1min 4s
solution.head()
| MachineIdentifier | HasDetections | |
|---|---|---|
| 0 | 0000010489e3af074adeac69c53e555e | 0.599641 |
| 1 | 00000176ac758d54827acd545b6315a5 | 0.748345 |
| 2 | 0000019dcefc128c2d4387c1273dae1d | 0.454333 |
| 3 | 0000055553dc51b1295785415f1a224d | 0.469111 |
| 4 | 00000574cefffeca83ec8adf9285b2bf | 0.662187 |
# Feature importances (gain) of the trained LightGBM model, descending.
# BUG fix: X_train was a bare numpy array (split from X.values) and has no
# .columns attribute; X.columns carries the same feature order as the
# training matrix. Building the frame unsorted and sorting once also replaces
# the fragile argsort/np.sort pairing of the original.
temp = pd.DataFrame({
    'features': X.columns,
    'importance': clf.feature_importances_,
})
temp.sort_values(by=['importance'], ascending=False, inplace=True)
temp
| features | importance | |
|---|---|---|
| 108 | SmartScreen | 402944.766618 |
| 107 | AVProductStatesIdentifier | 280888.551537 |
| 106 | EngineVersion | 89252.064796 |
| 105 | AppVersion | 80002.517550 |
| 104 | CountryIdentifier | 54711.030009 |
| 103 | Interaction_02 | 50108.271050 |
| 102 | Census_OSInstallTypeName | 49610.878160 |
| 101 | Device_PossibleOwnership | 47137.113151 |
| 100 | Census_OSVersion | 38533.994658 |
| 99 | Wdft_IsGamer | 37674.048997 |
| 98 | PPI | 31586.841548 |
| 97 | Wdft_RegionIdentifier | 30702.778116 |
| 96 | Census_ProcessorModelIdentifier | 30316.556373 |
| 95 | CityIdentifier | 29678.426420 |
| 94 | Interaction_03 | 27916.432337 |
| 93 | LocaleEnglishNameIdentifier | 23723.240506 |
| 92 | Census_FirmwareVersionIdentifier | 23687.421844 |
| 91 | Census_IsAlwaysOnAlwaysConnectedCapable | 23577.176166 |
| 90 | Census_OSInstallLanguageIdentifier | 23491.129490 |
| 89 | Census_OEMModelIdentifier | 23072.982218 |
| 88 | OsBuildLab | 22961.664022 |
| 87 | Census_ActivationChannel | 22429.254086 |
| 86 | Census_IsVirtualDevice | 21808.445312 |
| 85 | GeoNameIdentifier | 19510.442236 |
| 84 | Census_OEMNameIdentifier | 18945.871079 |
| 83 | Census_OSUILocaleIdentifier | 17960.874937 |
| 82 | IeVerIdentifier | 16503.889473 |
| 81 | RtpStateBitfield | 16277.195962 |
| 80 | Processor | 16081.757362 |
| 79 | Census_OSEdition | 15835.934268 |
| ... | ... | ... |
| 29 | Interaction_MYYear_16Month_5 | 163.782850 |
| 28 | Interaction_MYYear_17Month_4 | 124.332320 |
| 27 | Interaction_MYYear_17Month_12 | 120.278489 |
| 26 | Interaction_MYYear_16Month_10 | 102.264990 |
| 25 | Interaction_MYYear_18Month_2 | 95.210730 |
| 24 | Interaction_MYYear_16Month_2 | 90.008140 |
| 23 | Interaction_MYYear_16Month_9 | 87.078200 |
| 22 | Interaction_MYYear_16Month_4 | 85.361409 |
| 21 | Interaction_MYYear_16Month_8 | 79.674981 |
| 20 | Interaction_MYYear_17Month_11 | 62.202849 |
| 19 | Interaction_MYYear_15Month_11 | 45.983190 |
| 18 | Interaction_MYYear_17Month_10 | 39.600840 |
| 17 | Interaction_MYYear_16Month_3 | 30.422991 |
| 16 | Interaction_MYYear_18Month_9 | 29.952830 |
| 15 | Interaction_MYYear_16Month_1 | 19.081050 |
| 14 | Interaction_MYYear_17Month_1 | 7.404160 |
| 13 | Interaction_MYYear_15Month_3 | 6.556640 |
| 12 | Interaction_MYYear_17Month_5 | 6.225560 |
| 11 | Interaction_MYYear_17Month_8 | 5.517140 |
| 3 | Interaction_MYYear_14Month_7 | 0.000000 |
| 4 | Interaction_MYYear_17Month_2 | 0.000000 |
| 6 | Interaction_MYYear_14Month_10 | 0.000000 |
| 2 | Interaction_MYYear_14Month_3 | 0.000000 |
| 5 | Interaction_MYYear_18Month_1 | 0.000000 |
| 1 | AV_highrisk | 0.000000 |
| 7 | Interaction_MYYear_15Month_9 | 0.000000 |
| 8 | Interaction_MYYear_15Month_8 | 0.000000 |
| 9 | Interaction_MYYear_15Month_1 | 0.000000 |
| 10 | Census_DeviceFamily | 0.000000 |
| 0 | Census_ThresholdOptIn | 0.000000 |
109 rows × 2 columns
# Keep only the features that contributed nonzero split gain.
important_columns=temp[temp.importance>0]['features'].tolist()
from sklearn.feature_selection import RFE
#selector = RFE(clf, 75, step=1,verbose=True)
%%time
#selector=selector.fit(X_train, y_train)
Wall time: 0 ns
#print(selector.support_)
#print(selector.ranking_)
# Second LightGBM model: deep trees (500 leaves, unlimited depth), 500 rounds,
# moderate L1/L2 regularisation (0.7) and 90% row/column subsampling.
lgbm_params = dict(
    boosting_type='gbdt',
    class_weight=None,
    colsample_bytree=0.9,
    importance_type='gain',
    learning_rate=0.15,
    max_depth=-1,
    min_child_samples=2,
    min_child_weight=0.001,
    min_split_gain=0.0,
    n_estimators=500,
    n_jobs=-1,
    num_leaves=500,
    objective=None,
    random_state=42,
    reg_alpha=0.7,
    reg_lambda=0.7,
    silent=False,
    subsample=0.9,
    subsample_for_bin=100000,
    subsample_freq=1,
)
clf2 = LGBMClassifier(**lgbm_params)
%%time
# Train clf2 on the positively-important feature subset only.
clf2.fit(X_train.loc[:, important_columns], y_train)
Wall time: 4min 28s
LGBMClassifier(boosting_type='gbdt', class_weight=None, colsample_bytree=0.9,
importance_type='gain', learning_rate=0.15, max_depth=-1,
min_child_samples=2, min_child_weight=0.001, min_split_gain=0.0,
n_estimators=500, n_jobs=-1, num_leaves=500, objective=None,
random_state=42, reg_alpha=0.7, reg_lambda=0.7, silent=False,
subsample=0.9, subsample_for_bin=100000, subsample_freq=1)
# Hard-label predictions for the threshold metrics, plus P(class==1) for ROC-AUC.
predictions = clf2.predict(X_test[important_columns])
prediction_scores = clf2.predict_proba(X_test[important_columns])
# predict_proba returns an (n_samples, 2) array; column 1 is the probability of
# the positive class. A vectorized column slice replaces the original Python
# per-row append loop (same values, O(n) NumPy instead of interpreted loop).
scores = prediction_scores[:, 1].tolist()
C:\Users\gandh\Anaconda3\lib\site-packages\sklearn\preprocessing\label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty. if diff:
# Hold-out evaluation: compute each metric first, then print the same report.
cm = confusion_matrix(y_test, predictions)
acc = accuracy_score(y_test, predictions)
prec = precision_score(y_test, predictions)
rec = recall_score(y_test, predictions)
auc = roc_auc_score(y_test, scores)
print("Confusion Matrix for the Model : "+"\n", cm)
print("Accuracy of the model : ", acc)
print("Precision of the Model : ", prec)
print("Recall score of the Model : ", rec)
print("Area under ROC curve for the Model : ", auc)
Confusion Matrix for the Model : [[444490 224437] [230343 438839]] Accuracy of the model : 0.6601323210590467 Precision of the Model : 0.6616235172085225 Recall score of the Model : 0.6557842261148686 Area under ROC curve for the Model : 0.7247767926123824
# Score the competition test set; keep P(class==1) for the submission file.
prediction_scores = clf2.predict_proba(df_test[important_columns])
# Column 1 of the (n_samples, 2) probability array — vectorized slice replaces
# the original per-row append loop.
scores = prediction_scores[:, 1].tolist()
%%time
# Attach the predicted probabilities and write the Kaggle submission file.
solution.loc[:, 'HasDetections'] = scores
solution.to_csv('submit_31st.csv', index=False)
Wall time: 30.8 s
# Third LightGBM configuration, for comparison with clf2: twice the rounds
# (1000), far fewer leaves (100), stronger L1/L2 (0.9) and a larger
# subsample_for_bin. Only constructed here — training happens in a later cell.
clf3 = LGBMClassifier(
    boosting_type='gbdt',
    class_weight=None,
    colsample_bytree=0.9,
    importance_type='gain',
    learning_rate=0.15,
    max_depth=-1,
    min_child_samples=2,
    min_child_weight=0.001,
    min_split_gain=0.0,
    n_estimators=1000,
    n_jobs=-1,
    num_leaves=100,
    objective=None,
    random_state=42,
    reg_alpha=0.9,
    reg_lambda=0.9,
    silent=False,
    subsample=0.9,
    subsample_for_bin=200000,
    subsample_freq=1,
)
%%time
# BUG FIX: the original cell re-fit clf2 here, so the clf3 configuration defined
# just above was never trained — the captured cell output below this line shows
# clf2's parameters (num_leaves=500, n_estimators=500), not clf3's. Fit clf3.
clf3.fit(X_train[important_columns], y_train)
Wall time: 5min 15s
LGBMClassifier(boosting_type='gbdt', class_weight=None, colsample_bytree=0.9,
importance_type='gain', learning_rate=0.15, max_depth=-1,
min_child_samples=2, min_child_weight=0.001, min_split_gain=0.0,
n_estimators=500, n_jobs=-1, num_leaves=500, objective=None,
random_state=42, reg_alpha=0.7, reg_lambda=0.7, silent=False,
subsample=0.9, subsample_for_bin=100000, subsample_freq=1)
# NOTE(review): this evaluation reuses clf2 — the metrics it produces are
# identical to the earlier clf2 evaluation (see duplicated output below), which
# suggests clf3 was the intended model here; confirm before changing.
predictions = clf2.predict(X_test[important_columns])
prediction_scores = clf2.predict_proba(X_test[important_columns])
# Vectorized P(class==1) column slice replaces the original per-row append loop.
scores = prediction_scores[:, 1].tolist()
C:\Users\gandh\Anaconda3\lib\site-packages\sklearn\preprocessing\label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty. if diff:
# Evaluate the re-fit model on the hold-out split (same report as the earlier cell).
conf_mat = confusion_matrix(y_test, predictions)
print("Confusion Matrix for the Model : "+"\n", conf_mat)
print("Accuracy of the model : ", accuracy_score(y_test, predictions))
print("Precision of the Model : ", precision_score(y_test, predictions))
print("Recall score of the Model : ", recall_score(y_test, predictions))
print("Area under ROC curve for the Model : ", roc_auc_score(y_test, scores))
Confusion Matrix for the Model : [[444490 224437] [230343 438839]] Accuracy of the model : 0.6601323210590467 Precision of the Model : 0.6616235172085225 Recall score of the Model : 0.6557842261148686 Area under ROC curve for the Model : 0.7247767926123824
#clf=DecisionTreeClassifier(random_state=42)
import keras
Using TensorFlow backend.
from keras.models import Sequential
from keras.layers import Dense
from keras import layers
# Cross-validation
from sklearn.model_selection import KFold, StratifiedKFold, KFold #for K-fold cross validation
from sklearn.model_selection import cross_val_score #score evaluation
from sklearn.model_selection import cross_val_predict #prediction
from sklearn.model_selection import cross_validate
from keras.callbacks import EarlyStopping, ModelCheckpoint
# fix random seed for reproducibility
# NOTE(review): this seeds only NumPy's legacy global RNG; Keras/TensorFlow
# keep their own RNG state, so the network training below is not guaranteed
# to be fully reproducible from this seed alone.
np.random.seed(7)
# Notebook inspection cells: the bare `98` and `(4460362, 109)` lines that
# follow are captured cell outputs from the original notebook, not statements —
# 98 selected features out of 109 total columns in X.
len(important_columns)
98
X.shape
(4460362, 109)
# Feed-forward binary classifier over all 109 features:
# input dropout (0.5) -> Dense(20, ReLU) -> dropout (0.5) -> sigmoid output.
# Sequential accepts the layer list directly — equivalent to repeated .add().
classifier = Sequential([
    layers.Dropout(0.5, input_shape=(109,)),
    layers.Dense(units=20, activation='relu'),
    layers.Dropout(0.5),
    Dense(1, activation='sigmoid', kernel_initializer='random_normal'),
])
# Adam + binary cross-entropy; accuracy tracked during training.
classifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# Early stopping on validation loss (patience 5) plus checkpointing of the
# best weights seen so far, for the first training run.
callbacks1 = [
    EarlyStopping(monitor='val_loss', patience=5, min_delta=0, verbose=1),
    ModelCheckpoint(filepath='best_model2.h5', monitor='val_loss', save_best_only=True),
]
# Same scheme with a longer patience (10) and a separate checkpoint file,
# for a second, more tolerant training run.
callbacks2 = [
    EarlyStopping(monitor='val_loss', patience=10, min_delta=0, verbose=1),
    ModelCheckpoint(filepath='best_model3.h5', monitor='val_loss', save_best_only=True),
]
%%time
# Fit the model
# Train for up to 100 epochs (EarlyStopping in callbacks1 will usually halt
# sooner) with a large batch size; validates each epoch on (X_val, y_val)
# and checkpoints the best weights to best_model2.h5 via ModelCheckpoint.
classifier.fit(X_train, y_train, epochs=100, batch_size=10000,validation_data=(X_val, y_val),callbacks=callbacks1)
Train on 2497802 samples, validate on 624451 samples Epoch 1/100 2210000/2497802 [=========================>....] - ETA: 4:29 - loss: 0.7096 - acc: 0.493 - ETA: 2:31 - loss: 0.7085 - acc: 0.495 - ETA: 1:48 - loss: 0.7072 - acc: 0.495 - ETA: 1:24 - loss: 0.7065 - acc: 0.496 - ETA: 1:10 - loss: 0.7056 - acc: 0.498 - ETA: 1:01 - loss: 0.7054 - acc: 0.498 - ETA: 55s - loss: 0.7053 - acc: 0.498 - ETA: 50s - loss: 0.7053 - acc: 0.49 - ETA: 45s - loss: 0.7050 - acc: 0.49 - ETA: 43s - loss: 0.7048 - acc: 0.49 - ETA: 40s - loss: 0.7043 - acc: 0.50 - ETA: 38s - loss: 0.7036 - acc: 0.50 - ETA: 36s - loss: 0.7029 - acc: 0.50 - ETA: 34s - loss: 0.7025 - acc: 0.50 - ETA: 33s - loss: 0.7023 - acc: 0.50 - ETA: 32s - loss: 0.7022 - acc: 0.50 - ETA: 31s - loss: 0.7019 - acc: 0.50 - ETA: 30s - loss: 0.7016 - acc: 0.50 - ETA: 29s - loss: 0.7014 - acc: 0.50 - ETA: 28s - loss: 0.7012 - acc: 0.50 - ETA: 27s - loss: 0.7010 - acc: 0.50 - ETA: 27s - loss: 0.7008 - acc: 0.50 - ETA: 26s - loss: 0.7005 - acc: 0.50 - ETA: 25s - loss: 0.7001 - acc: 0.50 - ETA: 25s - loss: 0.6998 - acc: 0.50 - ETA: 24s - loss: 0.6995 - acc: 0.51 - ETA: 24s - loss: 0.6992 - acc: 0.51 - ETA: 24s - loss: 0.6989 - acc: 0.51 - ETA: 23s - loss: 0.6988 - acc: 0.51 - ETA: 23s - loss: 0.6986 - acc: 0.51 - ETA: 22s - loss: 0.6984 - acc: 0.51 - ETA: 22s - loss: 0.6982 - acc: 0.51 - ETA: 22s - loss: 0.6980 - acc: 0.51 - ETA: 21s - loss: 0.6978 - acc: 0.51 - ETA: 21s - loss: 0.6976 - acc: 0.51 - ETA: 21s - loss: 0.6974 - acc: 0.51 - ETA: 21s - loss: 0.6971 - acc: 0.51 - ETA: 20s - loss: 0.6970 - acc: 0.51 - ETA: 20s - loss: 0.6968 - acc: 0.51 - ETA: 20s - loss: 0.6967 - acc: 0.51 - ETA: 20s - loss: 0.6965 - acc: 0.51 - ETA: 19s - loss: 0.6964 - acc: 0.51 - ETA: 19s - loss: 0.6962 - acc: 0.51 - ETA: 19s - loss: 0.6960 - acc: 0.51 - ETA: 19s - loss: 0.6958 - acc: 0.51 - ETA: 18s - loss: 0.6957 - acc: 0.51 - ETA: 18s - loss: 0.6956 - acc: 0.51 - ETA: 18s - loss: 0.6955 - acc: 0.52 - ETA: 18s - loss: 0.6953 - acc: 0.52 - ETA: 18s 
- loss: 0.6952 - acc: 0.52 - ETA: 17s - loss: 0.6950 - acc: 0.52 - ETA: 17s - loss: 0.6948 - acc: 0.52 - ETA: 17s - loss: 0.6947 - acc: 0.52 - ETA: 17s - loss: 0.6946 - acc: 0.52 - ETA: 17s - loss: 0.6945 - acc: 0.52 - ETA: 17s - loss: 0.6943 - acc: 0.52 - ETA: 16s - loss: 0.6942 - acc: 0.52 - ETA: 16s - loss: 0.6941 - acc: 0.52 - ETA: 16s - loss: 0.6940 - acc: 0.52 - ETA: 16s - loss: 0.6939 - acc: 0.52 - ETA: 16s - loss: 0.6938 - acc: 0.52 - ETA: 16s - loss: 0.6937 - acc: 0.52 - ETA: 16s - loss: 0.6935 - acc: 0.52 - ETA: 15s - loss: 0.6934 - acc: 0.52 - ETA: 15s - loss: 0.6933 - acc: 0.52 - ETA: 15s - loss: 0.6932 - acc: 0.52 - ETA: 15s - loss: 0.6931 - acc: 0.52 - ETA: 15s - loss: 0.6930 - acc: 0.52 - ETA: 14s - loss: 0.6928 - acc: 0.52 - ETA: 14s - loss: 0.6927 - acc: 0.52 - ETA: 14s - loss: 0.6926 - acc: 0.52 - ETA: 14s - loss: 0.6925 - acc: 0.52 - ETA: 14s - loss: 0.6924 - acc: 0.52 - ETA: 14s - loss: 0.6923 - acc: 0.52 - ETA: 14s - loss: 0.6922 - acc: 0.53 - ETA: 14s - loss: 0.6922 - acc: 0.53 - ETA: 13s - loss: 0.6921 - acc: 0.53 - ETA: 13s - loss: 0.6920 - acc: 0.53 - ETA: 13s - loss: 0.6919 - acc: 0.53 - ETA: 13s - loss: 0.6918 - acc: 0.53 - ETA: 13s - loss: 0.6917 - acc: 0.53 - ETA: 13s - loss: 0.6916 - acc: 0.53 - ETA: 13s - loss: 0.6915 - acc: 0.53 - ETA: 13s - loss: 0.6915 - acc: 0.53 - ETA: 12s - loss: 0.6914 - acc: 0.53 - ETA: 12s - loss: 0.6913 - acc: 0.53 - ETA: 12s - loss: 0.6912 - acc: 0.53 - ETA: 12s - loss: 0.6911 - acc: 0.53 - ETA: 12s - loss: 0.6910 - acc: 0.53 - ETA: 12s - loss: 0.6909 - acc: 0.53 - ETA: 12s - loss: 0.6908 - acc: 0.53 - ETA: 12s - loss: 0.6907 - acc: 0.53 - ETA: 12s - loss: 0.6907 - acc: 0.53 - ETA: 12s - loss: 0.6906 - acc: 0.53 - ETA: 11s - loss: 0.6905 - acc: 0.53 - ETA: 11s - loss: 0.6905 - acc: 0.53 - ETA: 11s - loss: 0.6904 - acc: 0.53 - ETA: 11s - loss: 0.6903 - acc: 0.53 - ETA: 11s - loss: 0.6902 - acc: 0.53 - ETA: 11s - loss: 0.6901 - acc: 0.53 - ETA: 11s - loss: 0.6901 - acc: 0.53 - ETA: 11s - loss: 0.6900 - acc: 
0.53 - ETA: 11s - loss: 0.6899 - acc: 0.53 - ETA: 11s - loss: 0.6898 - acc: 0.53 - ETA: 10s - loss: 0.6897 - acc: 0.53 - ETA: 10s - loss: 0.6896 - acc: 0.53 - ETA: 10s - loss: 0.6896 - acc: 0.53 - ETA: 10s - loss: 0.6895 - acc: 0.53 - ETA: 10s - loss: 0.6895 - acc: 0.53 - ETA: 10s - loss: 0.6894 - acc: 0.53 - ETA: 10s - loss: 0.6893 - acc: 0.53 - ETA: 10s - loss: 0.6893 - acc: 0.53 - ETA: 10s - loss: 0.6892 - acc: 0.53 - ETA: 10s - loss: 0.6891 - acc: 0.53 - ETA: 10s - loss: 0.6890 - acc: 0.53 - ETA: 9s - loss: 0.6890 - acc: 0.5395 - ETA: 9s - loss: 0.6889 - acc: 0.539 - ETA: 9s - loss: 0.6888 - acc: 0.539 - ETA: 9s - loss: 0.6887 - acc: 0.540 - ETA: 9s - loss: 0.6887 - acc: 0.540 - ETA: 9s - loss: 0.6886 - acc: 0.540 - ETA: 9s - loss: 0.6886 - acc: 0.540 - ETA: 9s - loss: 0.6885 - acc: 0.540 - ETA: 9s - loss: 0.6884 - acc: 0.541 - ETA: 9s - loss: 0.6884 - acc: 0.541 - ETA: 9s - loss: 0.6883 - acc: 0.541 - ETA: 8s - loss: 0.6882 - acc: 0.541 - ETA: 8s - loss: 0.6881 - acc: 0.541 - ETA: 8s - loss: 0.6880 - acc: 0.542 - ETA: 8s - loss: 0.6880 - acc: 0.542 - ETA: 8s - loss: 0.6879 - acc: 0.542 - ETA: 8s - loss: 0.6879 - acc: 0.542 - ETA: 8s - loss: 0.6878 - acc: 0.542 - ETA: 8s - loss: 0.6877 - acc: 0.543 - ETA: 8s - loss: 0.6877 - acc: 0.543 - ETA: 8s - loss: 0.6876 - acc: 0.543 - ETA: 8s - loss: 0.6876 - acc: 0.543 - ETA: 7s - loss: 0.6875 - acc: 0.543 - ETA: 7s - loss: 0.6874 - acc: 0.543 - ETA: 7s - loss: 0.6874 - acc: 0.544 - ETA: 7s - loss: 0.6873 - acc: 0.544 - ETA: 7s - loss: 0.6872 - acc: 0.544 - ETA: 7s - loss: 0.6871 - acc: 0.544 - ETA: 7s - loss: 0.6871 - acc: 0.544 - ETA: 7s - loss: 0.6870 - acc: 0.545 - ETA: 7s - loss: 0.6870 - acc: 0.545 - ETA: 7s - loss: 0.6869 - acc: 0.545 - ETA: 7s - loss: 0.6868 - acc: 0.545 - ETA: 6s - loss: 0.6867 - acc: 0.545 - ETA: 6s - loss: 0.6867 - acc: 0.546 - ETA: 6s - loss: 0.6866 - acc: 0.546 - ETA: 6s - loss: 0.6866 - acc: 0.546 - ETA: 6s - loss: 0.6865 - acc: 0.546 - ETA: 6s - loss: 0.6865 - acc: 0.546 - ETA: 6s - loss: 
0.6864 - acc: 0.546 - ETA: 6s - loss: 0.6863 - acc: 0.547 - ETA: 6s - loss: 0.6863 - acc: 0.547 - ETA: 6s - loss: 0.6862 - acc: 0.547 - ETA: 6s - loss: 0.6862 - acc: 0.547 - ETA: 5s - loss: 0.6861 - acc: 0.547 - ETA: 5s - loss: 0.6860 - acc: 0.547 - ETA: 5s - loss: 0.6859 - acc: 0.548 - ETA: 5s - loss: 0.6859 - acc: 0.548 - ETA: 5s - loss: 0.6858 - acc: 0.548 - ETA: 5s - loss: 0.6858 - acc: 0.548 - ETA: 5s - loss: 0.6857 - acc: 0.548 - ETA: 5s - loss: 0.6857 - acc: 0.549 - ETA: 5s - loss: 0.6856 - acc: 0.549 - ETA: 5s - loss: 0.6856 - acc: 0.549 - ETA: 5s - loss: 0.6855 - acc: 0.549 - ETA: 5s - loss: 0.6855 - acc: 0.549 - ETA: 5s - loss: 0.6855 - acc: 0.549 - ETA: 4s - loss: 0.6854 - acc: 0.549 - ETA: 4s - loss: 0.6854 - acc: 0.549 - ETA: 4s - loss: 0.6853 - acc: 0.550 - ETA: 4s - loss: 0.6852 - acc: 0.550 - ETA: 4s - loss: 0.6852 - acc: 0.550 - ETA: 4s - loss: 0.6851 - acc: 0.550 - ETA: 4s - loss: 0.6851 - acc: 0.550 - ETA: 4s - loss: 0.6851 - acc: 0.550 - ETA: 4s - loss: 0.6850 - acc: 0.550 - ETA: 4s - loss: 0.6850 - acc: 0.550 - ETA: 4s - loss: 0.6849 - acc: 0.551 - ETA: 4s - loss: 0.6849 - acc: 0.551 - ETA: 3s - loss: 0.6848 - acc: 0.551 - ETA: 3s - loss: 0.6848 - acc: 0.551 - ETA: 3s - loss: 0.6847 - acc: 0.551 - ETA: 3s - loss: 0.6847 - acc: 0.551 - ETA: 3s - loss: 0.6847 - acc: 0.551 - ETA: 3s - loss: 0.6846 - acc: 0.551 - ETA: 3s - loss: 0.6846 - acc: 0.551 - ETA: 3s - loss: 0.6845 - acc: 0.552 - ETA: 3s - loss: 0.6845 - acc: 0.552 - ETA: 3s - loss: 0.6845 - acc: 0.552 - ETA: 3s - loss: 0.6844 - acc: 0.552 - ETA: 3s - loss: 0.6844 - acc: 0.552 - ETA: 3s - loss: 0.6843 - acc: 0.552 - ETA: 3s - loss: 0.6843 - acc: 0.552 - ETA: 2s - loss: 0.6843 - acc: 0.552 - ETA: 2s - loss: 0.6842 - acc: 0.552 - ETA: 2s - loss: 0.6842 - acc: 0.553 - ETA: 2s - loss: 0.6841 - acc: 0.553 - ETA: 2s - loss: 0.6841 - acc: 0.553 - ETA: 2s - loss: 0.6840 - acc: 0.553 - ETA: 2s - loss: 0.6840 - acc: 0.553 - ETA: 2s - loss: 0.6839 - acc: 0.553 - ETA: 2s - loss: 0.6839 - acc: 0.553 - 
ETA: 2s - loss: 0.6838 - acc: 0.553 - ETA: 2s - loss: 0.6838 - acc: 0.553 - ETA: 2s - loss: 0.6837 - acc: 0.554 - ETA: 2s - loss: 0.6837 - acc: 0.554 - ETA: 2s - loss: 0.6836 - acc: 0.554 - ETA: 1s - loss: 0.6836 - acc: 0.5542497802/2497802 [==============================] - ETA: 1s - loss: 0.6835 - acc: 0.554 - ETA: 1s - loss: 0.6835 - acc: 0.554 - ETA: 1s - loss: 0.6835 - acc: 0.554 - ETA: 1s - loss: 0.6834 - acc: 0.554 - ETA: 1s - loss: 0.6833 - acc: 0.555 - ETA: 1s - loss: 0.6833 - acc: 0.555 - ETA: 1s - loss: 0.6833 - acc: 0.555 - ETA: 1s - loss: 0.6832 - acc: 0.555 - ETA: 1s - loss: 0.6832 - acc: 0.555 - ETA: 1s - loss: 0.6832 - acc: 0.555 - ETA: 1s - loss: 0.6831 - acc: 0.555 - ETA: 1s - loss: 0.6831 - acc: 0.555 - ETA: 0s - loss: 0.6830 - acc: 0.555 - ETA: 0s - loss: 0.6830 - acc: 0.556 - ETA: 0s - loss: 0.6829 - acc: 0.556 - ETA: 0s - loss: 0.6829 - acc: 0.556 - ETA: 0s - loss: 0.6828 - acc: 0.556 - ETA: 0s - loss: 0.6828 - acc: 0.556 - ETA: 0s - loss: 0.6828 - acc: 0.556 - ETA: 0s - loss: 0.6827 - acc: 0.556 - ETA: 0s - loss: 0.6827 - acc: 0.556 - ETA: 0s - loss: 0.6827 - acc: 0.556 - ETA: 0s - loss: 0.6826 - acc: 0.556 - ETA: 0s - loss: 0.6826 - acc: 0.556 - ETA: 0s - loss: 0.6826 - acc: 0.557 - ETA: 0s - loss: 0.6825 - acc: 0.557 - ETA: 0s - loss: 0.6825 - acc: 0.557 - 18s 7us/step - loss: 0.6824 - acc: 0.5572 - val_loss: 0.6614 - val_acc: 0.6152 Epoch 2/100 2240000/2497802 [=========================>....] 
- ETA: 41s - loss: 0.6730 - acc: 0.58 - ETA: 28s - loss: 0.6739 - acc: 0.57 - ETA: 24s - loss: 0.6729 - acc: 0.57 - ETA: 22s - loss: 0.6730 - acc: 0.57 - ETA: 21s - loss: 0.6735 - acc: 0.57 - ETA: 20s - loss: 0.6732 - acc: 0.57 - ETA: 19s - loss: 0.6730 - acc: 0.57 - ETA: 18s - loss: 0.6735 - acc: 0.57 - ETA: 18s - loss: 0.6737 - acc: 0.57 - ETA: 18s - loss: 0.6732 - acc: 0.57 - ETA: 17s - loss: 0.6732 - acc: 0.57 - ETA: 17s - loss: 0.6731 - acc: 0.57 - ETA: 16s - loss: 0.6731 - acc: 0.57 - ETA: 16s - loss: 0.6732 - acc: 0.57 - ETA: 16s - loss: 0.6731 - acc: 0.57 - ETA: 16s - loss: 0.6730 - acc: 0.57 - ETA: 16s - loss: 0.6729 - acc: 0.57 - ETA: 16s - loss: 0.6729 - acc: 0.57 - ETA: 15s - loss: 0.6729 - acc: 0.57 - ETA: 15s - loss: 0.6731 - acc: 0.57 - ETA: 15s - loss: 0.6731 - acc: 0.57 - ETA: 15s - loss: 0.6731 - acc: 0.57 - ETA: 15s - loss: 0.6731 - acc: 0.57 - ETA: 15s - loss: 0.6729 - acc: 0.57 - ETA: 15s - loss: 0.6730 - acc: 0.57 - ETA: 14s - loss: 0.6730 - acc: 0.57 - ETA: 14s - loss: 0.6730 - acc: 0.57 - ETA: 14s - loss: 0.6731 - acc: 0.57 - ETA: 14s - loss: 0.6731 - acc: 0.57 - ETA: 14s - loss: 0.6731 - acc: 0.57 - ETA: 14s - loss: 0.6729 - acc: 0.57 - ETA: 14s - loss: 0.6729 - acc: 0.57 - ETA: 14s - loss: 0.6729 - acc: 0.57 - ETA: 14s - loss: 0.6729 - acc: 0.57 - ETA: 14s - loss: 0.6729 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6727 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6728 - acc: 0.57 - ETA: 13s - loss: 0.6727 - acc: 0.57 - ETA: 13s - loss: 0.6727 - acc: 0.57 - ETA: 13s - loss: 0.6727 - acc: 0.58 - ETA: 13s - loss: 0.6726 - acc: 0.58 - ETA: 13s - loss: 0.6726 - acc: 0.58 - ETA: 12s - loss: 0.6726 - acc: 0.58 - ETA: 12s - loss: 0.6725 - acc: 0.58 - ETA: 12s - loss: 0.6725 - acc: 0.58 - ETA: 12s - loss: 0.6725 - acc: 0.58 - ETA: 12s - loss: 
0.6725 - acc: 0.58 - ETA: 12s - loss: 0.6725 - acc: 0.58 - ETA: 12s - loss: 0.6724 - acc: 0.58 - ETA: 12s - loss: 0.6723 - acc: 0.58 - ETA: 12s - loss: 0.6723 - acc: 0.58 - ETA: 12s - loss: 0.6724 - acc: 0.58 - ETA: 12s - loss: 0.6723 - acc: 0.58 - ETA: 12s - loss: 0.6724 - acc: 0.58 - ETA: 12s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6725 - acc: 0.58 - ETA: 11s - loss: 0.6725 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6724 - acc: 0.58 - ETA: 11s - loss: 0.6723 - acc: 0.58 - ETA: 11s - loss: 0.6723 - acc: 0.58 - ETA: 11s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6724 - acc: 0.58 - ETA: 10s - loss: 0.6724 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 10s - loss: 0.6723 - acc: 0.58 - ETA: 9s - loss: 0.6723 - acc: 0.5806 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6722 - acc: 0.580 - ETA: 9s - loss: 0.6721 - acc: 0.580 - ETA: 9s - loss: 0.6721 - acc: 0.580 - ETA: 9s - loss: 0.6721 - acc: 0.580 - ETA: 9s - loss: 0.6721 - acc: 0.580 - ETA: 9s - loss: 0.6721 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - 
ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6720 - acc: 0.581 - ETA: 8s - loss: 0.6719 - acc: 0.581 - ETA: 8s - loss: 0.6719 - acc: 0.581 - ETA: 8s - loss: 0.6719 - acc: 0.581 - ETA: 7s - loss: 0.6719 - acc: 0.581 - ETA: 7s - loss: 0.6719 - acc: 0.581 - ETA: 7s - loss: 0.6719 - acc: 0.581 - ETA: 7s - loss: 0.6719 - acc: 0.581 - ETA: 7s - loss: 0.6719 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6717 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 7s - loss: 0.6718 - acc: 0.581 - ETA: 6s - loss: 0.6718 - acc: 0.581 - ETA: 6s - loss: 0.6718 - acc: 0.581 - ETA: 6s - loss: 0.6718 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6718 - acc: 0.581 - ETA: 6s - loss: 0.6718 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 6s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6717 - acc: 0.581 - ETA: 5s - loss: 0.6716 - acc: 0.581 - ETA: 5s - loss: 0.6716 - acc: 0.581 - ETA: 5s - loss: 0.6716 - acc: 0.582 - ETA: 5s - loss: 0.6716 - acc: 0.582 - ETA: 5s - loss: 0.6716 - acc: 0.582 - ETA: 5s - loss: 0.6716 
- acc: 0.582 - ETA: 5s - loss: 0.6716 - acc: 0.582 - ETA: 5s - loss: 0.6716 - acc: 0.582 - ETA: 4s - loss: 0.6716 - acc: 0.582 - ETA: 4s - loss: 0.6716 - acc: 0.582 - ETA: 4s - loss: 0.6716 - acc: 0.582 - ETA: 4s - loss: 0.6716 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6715 - acc: 0.582 - ETA: 4s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6714 - acc: 0.582 - ETA: 3s - loss: 0.6713 - acc: 0.582 - ETA: 3s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6713 - acc: 0.582 - ETA: 2s - loss: 0.6712 - acc: 0.582 - ETA: 2s - loss: 0.6712 - acc: 0.582 - ETA: 2s - loss: 0.6712 - acc: 0.582 - ETA: 2s - loss: 0.6712 - acc: 0.582 - ETA: 2s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s 
- loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.58262497802/2497802 [==============================] - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6712 - acc: 0.582 - ETA: 1s - loss: 0.6711 - acc: 0.582 - ETA: 1s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6711 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.582 - ETA: 0s - loss: 0.6710 - acc: 0.583 - 16s 6us/step - loss: 0.6710 - acc: 0.5830 - val_loss: 0.6562 - val_acc: 0.6194 Epoch 3/100 2150000/2497802 [========================>.....] 
- ETA: 32s - loss: 0.6699 - acc: 0.58 - ETA: 23s - loss: 0.6708 - acc: 0.58 - ETA: 20s - loss: 0.6719 - acc: 0.58 - ETA: 18s - loss: 0.6712 - acc: 0.58 - ETA: 17s - loss: 0.6708 - acc: 0.58 - ETA: 17s - loss: 0.6702 - acc: 0.58 - ETA: 16s - loss: 0.6706 - acc: 0.58 - ETA: 16s - loss: 0.6704 - acc: 0.58 - ETA: 15s - loss: 0.6707 - acc: 0.58 - ETA: 15s - loss: 0.6705 - acc: 0.58 - ETA: 15s - loss: 0.6705 - acc: 0.58 - ETA: 15s - loss: 0.6705 - acc: 0.58 - ETA: 15s - loss: 0.6705 - acc: 0.58 - ETA: 15s - loss: 0.6705 - acc: 0.58 - ETA: 15s - loss: 0.6705 - acc: 0.58 - ETA: 15s - loss: 0.6703 - acc: 0.58 - ETA: 14s - loss: 0.6704 - acc: 0.58 - ETA: 14s - loss: 0.6705 - acc: 0.58 - ETA: 14s - loss: 0.6705 - acc: 0.58 - ETA: 14s - loss: 0.6704 - acc: 0.58 - ETA: 14s - loss: 0.6704 - acc: 0.58 - ETA: 14s - loss: 0.6701 - acc: 0.58 - ETA: 14s - loss: 0.6701 - acc: 0.58 - ETA: 14s - loss: 0.6700 - acc: 0.58 - ETA: 14s - loss: 0.6700 - acc: 0.58 - ETA: 14s - loss: 0.6701 - acc: 0.58 - ETA: 14s - loss: 0.6700 - acc: 0.58 - ETA: 14s - loss: 0.6700 - acc: 0.58 - ETA: 13s - loss: 0.6699 - acc: 0.58 - ETA: 13s - loss: 0.6698 - acc: 0.58 - ETA: 13s - loss: 0.6697 - acc: 0.58 - ETA: 13s - loss: 0.6698 - acc: 0.58 - ETA: 13s - loss: 0.6699 - acc: 0.58 - ETA: 13s - loss: 0.6699 - acc: 0.58 - ETA: 13s - loss: 0.6699 - acc: 0.58 - ETA: 13s - loss: 0.6699 - acc: 0.58 - ETA: 13s - loss: 0.6699 - acc: 0.58 - ETA: 13s - loss: 0.6698 - acc: 0.58 - ETA: 13s - loss: 0.6697 - acc: 0.58 - ETA: 13s - loss: 0.6698 - acc: 0.58 - ETA: 13s - loss: 0.6697 - acc: 0.58 - ETA: 13s - loss: 0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6696 - acc: 0.58 - ETA: 12s - loss: 0.6695 - acc: 0.58 - ETA: 12s - loss: 0.6695 - acc: 0.58 - ETA: 12s - loss: 0.6696 - acc: 0.58 - ETA: 12s - loss: 0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6696 - acc: 0.58 - ETA: 12s - loss: 0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6696 - acc: 0.58 - ETA: 12s - loss: 0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6698 - acc: 0.58 - ETA: 12s - loss: 
0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6697 - acc: 0.58 - ETA: 12s - loss: 0.6698 - acc: 0.58 - ETA: 11s - loss: 0.6698 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6698 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6698 - acc: 0.58 - ETA: 11s - loss: 0.6698 - acc: 0.58 - ETA: 11s - loss: 0.6698 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6697 - acc: 0.58 - ETA: 11s - loss: 0.6696 - acc: 0.58 - ETA: 11s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6696 - acc: 0.58 - ETA: 10s - loss: 0.6695 - acc: 0.58 - ETA: 9s - loss: 0.6695 - acc: 0.5857 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6696 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6696 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - 
ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 9s - loss: 0.6695 - acc: 0.585 - ETA: 8s - loss: 0.6695 - acc: 0.585 - ETA: 8s - loss: 0.6695 - acc: 0.585 - ETA: 8s - loss: 0.6695 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6696 - acc: 0.585 - ETA: 8s - loss: 0.6695 - acc: 0.585 - ETA: 8s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 7s - loss: 0.6695 - acc: 0.585 - ETA: 6s - loss: 0.6695 - acc: 0.585 - ETA: 6s - loss: 0.6695 - acc: 0.585 - ETA: 6s - loss: 0.6695 - acc: 0.585 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6695 - acc: 0.586 - ETA: 6s - loss: 0.6695 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 - acc: 0.586 - ETA: 6s - loss: 0.6694 
- acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 5s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6693 - acc: 0.586 - ETA: 4s - loss: 0.6693 - acc: 0.586 - ETA: 4s - loss: 0.6693 - acc: 0.586 - ETA: 4s - loss: 0.6693 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 4s - loss: 0.6694 - acc: 0.586 - ETA: 3s - loss: 0.6694 - acc: 0.586 - ETA: 3s - loss: 0.6694 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 3s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s 
- loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.58642497802/2497802 [==============================] - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 2s - loss: 0.6693 - acc: 0.586 - ETA: 1s - loss: 0.6693 - acc: 0.586 - ETA: 1s - loss: 0.6693 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6693 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 1s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6692 - acc: 0.586 - ETA: 0s - loss: 0.6691 - acc: 0.586 - 17s 7us/step - loss: 0.6692 - acc: 0.5867 - val_loss: 0.6550 - val_acc: 0.6210 Epoch 4/100 2150000/2497802 [========================>.....] 
- ETA: 37s - loss: 0.6693 - acc: 0.58 - ETA: 25s - loss: 0.6699 - acc: 0.58 - ETA: 21s - loss: 0.6690 - acc: 0.58 - ETA: 20s - loss: 0.6688 - acc: 0.58 - ETA: 19s - loss: 0.6690 - acc: 0.58 - ETA: 19s - loss: 0.6688 - acc: 0.59 - ETA: 19s - loss: 0.6687 - acc: 0.59 - ETA: 18s - loss: 0.6687 - acc: 0.58 - ETA: 18s - loss: 0.6687 - acc: 0.58 - ETA: 18s - loss: 0.6689 - acc: 0.58 - ETA: 17s - loss: 0.6686 - acc: 0.58 - ETA: 17s - loss: 0.6686 - acc: 0.58 - ETA: 17s - loss: 0.6685 - acc: 0.58 - ETA: 17s - loss: 0.6685 - acc: 0.58 - ETA: 17s - loss: 0.6687 - acc: 0.58 - ETA: 17s - loss: 0.6686 - acc: 0.58 - ETA: 16s - loss: 0.6685 - acc: 0.58 - ETA: 16s - loss: 0.6684 - acc: 0.58 - ETA: 16s - loss: 0.6687 - acc: 0.58 - ETA: 16s - loss: 0.6687 - acc: 0.58 - ETA: 16s - loss: 0.6688 - acc: 0.58 - ETA: 16s - loss: 0.6689 - acc: 0.58 - ETA: 16s - loss: 0.6687 - acc: 0.58 - ETA: 16s - loss: 0.6688 - acc: 0.58 - ETA: 15s - loss: 0.6687 - acc: 0.58 - ETA: 15s - loss: 0.6686 - acc: 0.58 - ETA: 15s - loss: 0.6687 - acc: 0.58 - ETA: 15s - loss: 0.6687 - acc: 0.58 - ETA: 15s - loss: 0.6687 - acc: 0.58 - ETA: 15s - loss: 0.6686 - acc: 0.58 - ETA: 15s - loss: 0.6685 - acc: 0.58 - ETA: 15s - loss: 0.6684 - acc: 0.58 - ETA: 15s - loss: 0.6684 - acc: 0.58 - ETA: 15s - loss: 0.6686 - acc: 0.58 - ETA: 15s - loss: 0.6686 - acc: 0.58 - ETA: 15s - loss: 0.6686 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6684 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6684 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 14s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6684 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 
0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6684 - acc: 0.58 - ETA: 13s - loss: 0.6683 - acc: 0.58 - ETA: 13s - loss: 0.6683 - acc: 0.58 - ETA: 13s - loss: 0.6684 - acc: 0.58 - ETA: 13s - loss: 0.6684 - acc: 0.58 - ETA: 13s - loss: 0.6684 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6686 - acc: 0.58 - ETA: 12s - loss: 0.6686 - acc: 0.58 - ETA: 12s - loss: 0.6686 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6686 - acc: 0.58 - ETA: 11s - loss: 0.6686 - acc: 0.58 - ETA: 11s - loss: 0.6687 - acc: 0.58 - ETA: 11s - loss: 0.6687 - acc: 0.58 - ETA: 11s - loss: 0.6687 - acc: 0.58 - ETA: 11s - loss: 0.6686 - acc: 0.58 - ETA: 11s - loss: 0.6686 - acc: 0.58 - ETA: 11s - loss: 0.6687 - acc: 0.58 - ETA: 11s - loss: 0.6687 - acc: 0.58 - ETA: 11s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6688 - acc: 0.58 - ETA: 10s - loss: 0.6688 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 10s - loss: 0.6686 - acc: 0.58 - ETA: 10s - loss: 0.6687 - acc: 0.58 - ETA: 9s - loss: 0.6687 - acc: 0.5879 - ETA: 9s - loss: 0.6687 - acc: 0.587 - 
ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6687 - acc: 0.587 - ETA: 9s - loss: 0.6686 - acc: 0.587 - ETA: 9s - loss: 0.6686 - acc: 0.587 - ETA: 9s - loss: 0.6686 - acc: 0.587 - ETA: 9s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.588 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 8s - loss: 0.6686 - acc: 0.587 - ETA: 7s - loss: 0.6686 - acc: 0.588 - ETA: 7s - loss: 0.6686 - acc: 0.588 - ETA: 7s - loss: 0.6686 - acc: 0.588 - ETA: 7s - loss: 0.6686 - acc: 0.587 - ETA: 7s - loss: 0.6686 - acc: 0.587 - ETA: 7s - loss: 0.6686 - acc: 0.587 - ETA: 7s - loss: 0.6686 - acc: 0.587 - ETA: 7s - loss: 0.6686 - acc: 0.587 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 7s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6686 - acc: 0.587 - ETA: 6s - loss: 0.6686 - acc: 0.588 - ETA: 6s - loss: 0.6686 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6685 - acc: 0.588 - ETA: 6s - loss: 0.6686 
- acc: 0.588 - ETA: 6s - loss: 0.6686 - acc: 0.588 - ETA: 6s - loss: 0.6686 - acc: 0.588 - ETA: 6s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.587 - ETA: 5s - loss: 0.6686 - acc: 0.587 - ETA: 5s - loss: 0.6686 - acc: 0.587 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 5s - loss: 0.6686 - acc: 0.588 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.588 - ETA: 4s - loss: 0.6686 - acc: 0.588 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 4s - loss: 0.6686 - acc: 0.587 - ETA: 3s - loss: 0.6686 - acc: 0.587 - ETA: 3s - loss: 0.6686 - acc: 0.587 - ETA: 3s - loss: 0.6686 - acc: 0.587 - ETA: 3s - loss: 0.6686 - acc: 0.587 - ETA: 3s - loss: 0.6685 - acc: 0.587 - ETA: 3s - loss: 0.6685 - acc: 0.587 - ETA: 3s - loss: 0.6685 - acc: 0.587 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 3s - loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6685 - acc: 0.588 - ETA: 2s 
- loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6685 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.58812497802/2497802 [==============================] - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 2s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 1s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - ETA: 0s - loss: 0.6684 - acc: 0.588 - 18s 7us/step - loss: 0.6684 - acc: 0.5885 - val_loss: 0.6545 - val_acc: 0.6218 Epoch 5/100 2150000/2497802 [========================>.....] 
- ETA: 37s - loss: 0.6758 - acc: 0.57 - ETA: 25s - loss: 0.6707 - acc: 0.58 - ETA: 23s - loss: 0.6695 - acc: 0.58 - ETA: 21s - loss: 0.6693 - acc: 0.58 - ETA: 20s - loss: 0.6692 - acc: 0.58 - ETA: 19s - loss: 0.6691 - acc: 0.58 - ETA: 19s - loss: 0.6692 - acc: 0.58 - ETA: 18s - loss: 0.6689 - acc: 0.58 - ETA: 18s - loss: 0.6691 - acc: 0.58 - ETA: 18s - loss: 0.6692 - acc: 0.58 - ETA: 17s - loss: 0.6694 - acc: 0.58 - ETA: 17s - loss: 0.6689 - acc: 0.58 - ETA: 17s - loss: 0.6688 - acc: 0.58 - ETA: 17s - loss: 0.6687 - acc: 0.58 - ETA: 17s - loss: 0.6687 - acc: 0.58 - ETA: 16s - loss: 0.6687 - acc: 0.58 - ETA: 16s - loss: 0.6689 - acc: 0.58 - ETA: 16s - loss: 0.6689 - acc: 0.58 - ETA: 16s - loss: 0.6690 - acc: 0.58 - ETA: 16s - loss: 0.6691 - acc: 0.58 - ETA: 16s - loss: 0.6690 - acc: 0.58 - ETA: 16s - loss: 0.6690 - acc: 0.58 - ETA: 16s - loss: 0.6689 - acc: 0.58 - ETA: 16s - loss: 0.6691 - acc: 0.58 - ETA: 16s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6691 - acc: 0.58 - ETA: 15s - loss: 0.6691 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6690 - acc: 0.58 - ETA: 15s - loss: 0.6688 - acc: 0.58 - ETA: 14s - loss: 0.6687 - acc: 0.58 - ETA: 14s - loss: 0.6687 - acc: 0.58 - ETA: 14s - loss: 0.6688 - acc: 0.58 - ETA: 14s - loss: 0.6689 - acc: 0.58 - ETA: 14s - loss: 0.6689 - acc: 0.58 - ETA: 14s - loss: 0.6689 - acc: 0.58 - ETA: 14s - loss: 0.6688 - acc: 0.58 - ETA: 14s - loss: 0.6688 - acc: 0.58 - ETA: 14s - loss: 0.6689 - acc: 0.58 - ETA: 14s - loss: 0.6689 - acc: 0.58 - ETA: 14s - loss: 0.6689 - acc: 0.58 - ETA: 14s - loss: 0.6687 - acc: 0.58 - ETA: 13s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 
0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 13s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6686 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6685 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6683 - acc: 0.58 - ETA: 12s - loss: 0.6683 - acc: 0.58 - ETA: 12s - loss: 0.6683 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 12s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 11s - loss: 0.6685 - acc: 0.58 - ETA: 11s - loss: 0.6684 - acc: 0.58 - ETA: 10s - loss: 0.6684 - acc: 0.58 - ETA: 10s - loss: 0.6684 - acc: 0.58 - ETA: 10s - loss: 0.6684 - acc: 0.58 - ETA: 10s - loss: 0.6684 - acc: 0.58 - ETA: 10s - loss: 0.6684 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6682 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6683 - acc: 0.58 - ETA: 10s - loss: 0.6682 - acc: 0.58 - ETA: 10s - loss: 0.6682 - acc: 0.58 - ETA: 9s - loss: 0.6682 - acc: 0.5886 - 
ETA: 9s - loss: 0.6682 - acc: 0.588 - ETA: 9s - loss: 0.6682 - acc: 0.588 - ETA: 9s - loss: 0.6682 - acc: 0.588 - ETA: 9s - loss: 0.6682 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 9s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6680 - acc: 0.588 - ETA: 8s - loss: 0.6680 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6680 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6680 - acc: 0.588 - ETA: 8s - loss: 0.6681 - acc: 0.588 - ETA: 8s - loss: 0.6680 - acc: 0.588 - ETA: 7s - loss: 0.6680 - acc: 0.588 - ETA: 7s - loss: 0.6681 - acc: 0.588 - ETA: 7s - loss: 0.6681 - acc: 0.588 - ETA: 7s - loss: 0.6681 - acc: 0.588 - ETA: 7s - loss: 0.6681 - acc: 0.588 - ETA: 7s - loss: 0.6681 - acc: 0.588 - ETA: 7s - loss: 0.6680 - acc: 0.588 - ETA: 7s - loss: 0.6681 - acc: 0.588 - ETA: 7s - loss: 0.6680 - acc: 0.589 - ETA: 7s - loss: 0.6680 - acc: 0.589 - ETA: 7s - loss: 0.6680 - acc: 0.589 - ETA: 7s - loss: 0.6680 - acc: 0.588 - ETA: 7s - loss: 0.6680 - acc: 0.588 - ETA: 7s - loss: 0.6680 - acc: 0.589 - ETA: 7s - loss: 0.6680 - acc: 0.589 - ETA: 6s - loss: 0.6680 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6680 - acc: 0.588 - ETA: 6s - loss: 0.6680 - acc: 0.588 - ETA: 6s - loss: 0.6679 - acc: 0.588 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 
- acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 6s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 5s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 4s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 3s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s 
- loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.58912497802/2497802 [==============================] - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 2s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6679 - acc: 0.589 - ETA: 1s - loss: 0.6678 - acc: 0.589 - ETA: 1s - loss: 0.6678 - acc: 0.589 - ETA: 1s - loss: 0.6678 - acc: 0.589 - ETA: 1s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6679 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - ETA: 0s - loss: 0.6678 - acc: 0.589 - 18s 7us/step - loss: 0.6678 - acc: 0.5892 - val_loss: 0.6541 - val_acc: 0.6220 Epoch 6/100 2150000/2497802 [========================>.....] 
- ETA: 36s - loss: 0.6670 - acc: 0.59 - ETA: 25s - loss: 0.6664 - acc: 0.59 - ETA: 21s - loss: 0.6675 - acc: 0.59 - ETA: 20s - loss: 0.6672 - acc: 0.59 - ETA: 19s - loss: 0.6667 - acc: 0.59 - ETA: 19s - loss: 0.6669 - acc: 0.59 - ETA: 19s - loss: 0.6671 - acc: 0.59 - ETA: 18s - loss: 0.6674 - acc: 0.59 - ETA: 18s - loss: 0.6674 - acc: 0.59 - ETA: 18s - loss: 0.6676 - acc: 0.58 - ETA: 18s - loss: 0.6674 - acc: 0.59 - ETA: 18s - loss: 0.6672 - acc: 0.59 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 17s - loss: 0.6672 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 16s - loss: 0.6673 - acc: 0.58 - ETA: 16s - loss: 0.6674 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 14s - loss: 0.6673 - acc: 0.58 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 14s - loss: 0.6673 - acc: 0.58 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6675 - acc: 0.58 - ETA: 13s - loss: 0.6675 - acc: 0.58 - ETA: 13s - loss: 0.6675 - acc: 0.58 - ETA: 13s - loss: 0.6675 - acc: 0.58 - ETA: 13s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 
0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 9s - loss: 0.6674 - acc: 0.5894 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - 
ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6674 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6674 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6676 - acc: 0.589 - ETA: 8s - loss: 0.6676 - acc: 0.589 - ETA: 8s - loss: 0.6676 - acc: 0.589 - ETA: 8s - loss: 0.6676 - acc: 0.589 - ETA: 8s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 7s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6676 - acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 6s - loss: 0.6677 
- acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 6s - loss: 0.6677 - acc: 0.589 - ETA: 5s - loss: 0.6677 - acc: 0.589 - ETA: 5s - loss: 0.6677 - acc: 0.589 - ETA: 5s - loss: 0.6677 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 5s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 4s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6675 - acc: 0.589 - ETA: 3s - loss: 0.6675 - acc: 0.589 - ETA: 3s - loss: 0.6675 - acc: 0.589 - ETA: 3s - loss: 0.6676 - acc: 0.589 - ETA: 3s - loss: 0.6675 - acc: 0.589 - ETA: 3s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s 
- loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.58972497802/2497802 [==============================] - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 2s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 1s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6675 - acc: 0.589 - 18s 7us/step - loss: 0.6675 - acc: 0.5897 - val_loss: 0.6539 - val_acc: 0.6224 Epoch 7/100 2150000/2497802 [========================>.....] 
- ETA: 37s - loss: 0.6685 - acc: 0.58 - ETA: 26s - loss: 0.6674 - acc: 0.59 - ETA: 22s - loss: 0.6675 - acc: 0.59 - ETA: 20s - loss: 0.6686 - acc: 0.58 - ETA: 19s - loss: 0.6684 - acc: 0.58 - ETA: 19s - loss: 0.6684 - acc: 0.58 - ETA: 18s - loss: 0.6683 - acc: 0.58 - ETA: 18s - loss: 0.6684 - acc: 0.58 - ETA: 18s - loss: 0.6686 - acc: 0.58 - ETA: 18s - loss: 0.6684 - acc: 0.58 - ETA: 17s - loss: 0.6684 - acc: 0.58 - ETA: 17s - loss: 0.6684 - acc: 0.58 - ETA: 17s - loss: 0.6684 - acc: 0.58 - ETA: 17s - loss: 0.6684 - acc: 0.58 - ETA: 17s - loss: 0.6682 - acc: 0.58 - ETA: 17s - loss: 0.6682 - acc: 0.58 - ETA: 16s - loss: 0.6680 - acc: 0.58 - ETA: 16s - loss: 0.6679 - acc: 0.58 - ETA: 16s - loss: 0.6678 - acc: 0.59 - ETA: 16s - loss: 0.6677 - acc: 0.59 - ETA: 16s - loss: 0.6676 - acc: 0.59 - ETA: 16s - loss: 0.6676 - acc: 0.59 - ETA: 16s - loss: 0.6675 - acc: 0.59 - ETA: 15s - loss: 0.6675 - acc: 0.59 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.59 - ETA: 15s - loss: 0.6675 - acc: 0.59 - ETA: 15s - loss: 0.6677 - acc: 0.58 - ETA: 15s - loss: 0.6677 - acc: 0.58 - ETA: 15s - loss: 0.6677 - acc: 0.58 - ETA: 15s - loss: 0.6678 - acc: 0.58 - ETA: 15s - loss: 0.6679 - acc: 0.58 - ETA: 15s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6678 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6678 - acc: 0.58 - ETA: 14s - loss: 0.6680 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6679 - acc: 0.58 - ETA: 14s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6679 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 
0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6678 - acc: 0.58 - ETA: 13s - loss: 0.6677 - acc: 0.58 - ETA: 13s - loss: 0.6677 - acc: 0.58 - ETA: 12s - loss: 0.6677 - acc: 0.58 - ETA: 12s - loss: 0.6677 - acc: 0.58 - ETA: 12s - loss: 0.6677 - acc: 0.58 - ETA: 12s - loss: 0.6677 - acc: 0.58 - ETA: 12s - loss: 0.6676 - acc: 0.58 - ETA: 12s - loss: 0.6676 - acc: 0.58 - ETA: 12s - loss: 0.6676 - acc: 0.58 - ETA: 12s - loss: 0.6676 - acc: 0.58 - ETA: 12s - loss: 0.6675 - acc: 0.58 - ETA: 12s - loss: 0.6676 - acc: 0.58 - ETA: 12s - loss: 0.6675 - acc: 0.58 - ETA: 12s - loss: 0.6675 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6674 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6676 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 11s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6676 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6674 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 10s - loss: 0.6675 - acc: 0.58 - ETA: 9s - loss: 0.6675 - acc: 0.5894 - 
ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6676 - acc: 0.589 - ETA: 9s - loss: 0.6676 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 9s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6674 - acc: 0.589 - ETA: 8s - loss: 0.6674 - acc: 0.589 - ETA: 8s - loss: 0.6674 - acc: 0.589 - ETA: 8s - loss: 0.6674 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 8s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6674 - acc: 0.589 - ETA: 7s - loss: 0.6674 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 7s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 
- acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 6s - loss: 0.6675 - acc: 0.589 - ETA: 5s - loss: 0.6675 - acc: 0.589 - ETA: 5s - loss: 0.6675 - acc: 0.589 - ETA: 5s - loss: 0.6675 - acc: 0.589 - ETA: 5s - loss: 0.6675 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 5s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 4s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6673 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 3s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6673 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s 
- loss: 0.6673 - acc: 0.589 - ETA: 2s - loss: 0.6673 - acc: 0.589 - ETA: 2s - loss: 0.6673 - acc: 0.589 - ETA: 2s - loss: 0.6673 - acc: 0.589 - ETA: 2s - loss: 0.6673 - acc: 0.58952497802/2497802 [==============================] - ETA: 2s - loss: 0.6673 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 2s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6673 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 1s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - ETA: 0s - loss: 0.6674 - acc: 0.589 - 18s 7us/step - loss: 0.6674 - acc: 0.5895 - val_loss: 0.6537 - val_acc: 0.6230 Epoch 8/100 2150000/2497802 [========================>.....] 
- ETA: 37s - loss: 0.6683 - acc: 0.58 - ETA: 25s - loss: 0.6680 - acc: 0.58 - ETA: 21s - loss: 0.6668 - acc: 0.58 - ETA: 20s - loss: 0.6669 - acc: 0.58 - ETA: 19s - loss: 0.6661 - acc: 0.59 - ETA: 19s - loss: 0.6667 - acc: 0.58 - ETA: 18s - loss: 0.6669 - acc: 0.58 - ETA: 18s - loss: 0.6669 - acc: 0.58 - ETA: 18s - loss: 0.6674 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6679 - acc: 0.58 - ETA: 17s - loss: 0.6676 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6674 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6674 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6673 - acc: 0.58 - ETA: 16s - loss: 0.6674 - acc: 0.58 - ETA: 16s - loss: 0.6674 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6674 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 
0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.59 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6671 - acc: 0.58 - ETA: 13s - loss: 0.6671 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6671 - acc: 0.58 - ETA: 12s - loss: 0.6671 - acc: 0.58 - ETA: 12s - loss: 0.6671 - acc: 0.58 - ETA: 12s - loss: 0.6671 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - 
ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 9s - loss: 0.6672 - acc: 0.5897 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 
- acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 5s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 4s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 3s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s 
- loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.58962497802/2497802 [==============================] - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 2s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 1s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - ETA: 0s - loss: 0.6672 - acc: 0.589 - 18s 7us/step - loss: 0.6672 - acc: 0.5898 - val_loss: 0.6540 - val_acc: 0.6228 Epoch 9/100 2150000/2497802 [========================>.....] 
- ETA: 33s - loss: 0.6650 - acc: 0.59 - ETA: 24s - loss: 0.6652 - acc: 0.59 - ETA: 21s - loss: 0.6657 - acc: 0.59 - ETA: 19s - loss: 0.6664 - acc: 0.59 - ETA: 19s - loss: 0.6667 - acc: 0.59 - ETA: 18s - loss: 0.6670 - acc: 0.59 - ETA: 18s - loss: 0.6671 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.59 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6676 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.59 - ETA: 17s - loss: 0.6671 - acc: 0.59 - ETA: 17s - loss: 0.6674 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6676 - acc: 0.58 - ETA: 17s - loss: 0.6677 - acc: 0.58 - ETA: 17s - loss: 0.6676 - acc: 0.58 - ETA: 17s - loss: 0.6678 - acc: 0.58 - ETA: 17s - loss: 0.6678 - acc: 0.58 - ETA: 16s - loss: 0.6677 - acc: 0.58 - ETA: 16s - loss: 0.6677 - acc: 0.58 - ETA: 16s - loss: 0.6677 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6676 - acc: 0.58 - ETA: 16s - loss: 0.6677 - acc: 0.58 - ETA: 16s - loss: 0.6677 - acc: 0.58 - ETA: 15s - loss: 0.6677 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6674 - acc: 0.58 - ETA: 15s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6674 - acc: 0.58 - ETA: 15s - loss: 0.6674 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 14s - loss: 0.6673 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6671 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 
0.6671 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - 
ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 9s - loss: 0.6672 - acc: 0.5898 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 
- acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 3s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s 
- loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.58982497802/2497802 [==============================] - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 2s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 1s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - ETA: 0s - loss: 0.6671 - acc: 0.589 - 18s 7us/step - loss: 0.6671 - acc: 0.5897 - val_loss: 0.6538 - val_acc: 0.6232 Epoch 10/100 2150000/2497802 [========================>.....] 
- ETA: 37s - loss: 0.6687 - acc: 0.58 - ETA: 26s - loss: 0.6671 - acc: 0.58 - ETA: 22s - loss: 0.6678 - acc: 0.58 - ETA: 21s - loss: 0.6668 - acc: 0.58 - ETA: 20s - loss: 0.6670 - acc: 0.58 - ETA: 19s - loss: 0.6673 - acc: 0.58 - ETA: 19s - loss: 0.6670 - acc: 0.58 - ETA: 18s - loss: 0.6668 - acc: 0.58 - ETA: 18s - loss: 0.6670 - acc: 0.58 - ETA: 18s - loss: 0.6672 - acc: 0.58 - ETA: 17s - loss: 0.6674 - acc: 0.58 - ETA: 17s - loss: 0.6676 - acc: 0.58 - ETA: 17s - loss: 0.6675 - acc: 0.58 - ETA: 17s - loss: 0.6674 - acc: 0.58 - ETA: 16s - loss: 0.6674 - acc: 0.58 - ETA: 16s - loss: 0.6671 - acc: 0.58 - ETA: 16s - loss: 0.6671 - acc: 0.58 - ETA: 16s - loss: 0.6671 - acc: 0.58 - ETA: 16s - loss: 0.6670 - acc: 0.59 - ETA: 16s - loss: 0.6668 - acc: 0.59 - ETA: 16s - loss: 0.6668 - acc: 0.59 - ETA: 16s - loss: 0.6667 - acc: 0.59 - ETA: 16s - loss: 0.6667 - acc: 0.59 - ETA: 16s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6669 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6667 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6669 - acc: 0.59 - ETA: 15s - loss: 0.6669 - acc: 0.59 - ETA: 15s - loss: 0.6670 - acc: 0.59 - ETA: 15s - loss: 0.6669 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6669 - acc: 0.59 - ETA: 14s - loss: 0.6669 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6669 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 
0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6667 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 9s - loss: 0.6668 - acc: 0.5900 - 
ETA: 9s - loss: 0.6669 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6669 - acc: 0.590 - ETA: 9s - loss: 0.6669 - acc: 0.590 - ETA: 8s - loss: 0.6669 - acc: 0.590 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 
- acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.589 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s 
- loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.58992497802/2497802 [==============================] - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - 18s 7us/step - loss: 0.6670 - acc: 0.5896 - val_loss: 0.6550 - val_acc: 0.6232 Epoch 11/100 2150000/2497802 [========================>.....] 
- ETA: 36s - loss: 0.6695 - acc: 0.58 - ETA: 26s - loss: 0.6671 - acc: 0.58 - ETA: 22s - loss: 0.6668 - acc: 0.58 - ETA: 21s - loss: 0.6673 - acc: 0.58 - ETA: 19s - loss: 0.6675 - acc: 0.58 - ETA: 19s - loss: 0.6673 - acc: 0.58 - ETA: 18s - loss: 0.6676 - acc: 0.58 - ETA: 18s - loss: 0.6677 - acc: 0.58 - ETA: 18s - loss: 0.6675 - acc: 0.59 - ETA: 17s - loss: 0.6674 - acc: 0.58 - ETA: 17s - loss: 0.6671 - acc: 0.58 - ETA: 17s - loss: 0.6670 - acc: 0.58 - ETA: 17s - loss: 0.6671 - acc: 0.58 - ETA: 16s - loss: 0.6672 - acc: 0.58 - ETA: 16s - loss: 0.6672 - acc: 0.58 - ETA: 16s - loss: 0.6671 - acc: 0.59 - ETA: 16s - loss: 0.6671 - acc: 0.59 - ETA: 16s - loss: 0.6671 - acc: 0.59 - ETA: 16s - loss: 0.6671 - acc: 0.59 - ETA: 16s - loss: 0.6672 - acc: 0.59 - ETA: 16s - loss: 0.6671 - acc: 0.59 - ETA: 15s - loss: 0.6670 - acc: 0.59 - ETA: 15s - loss: 0.6669 - acc: 0.58 - ETA: 15s - loss: 0.6670 - acc: 0.59 - ETA: 15s - loss: 0.6670 - acc: 0.59 - ETA: 15s - loss: 0.6670 - acc: 0.59 - ETA: 15s - loss: 0.6669 - acc: 0.59 - ETA: 15s - loss: 0.6667 - acc: 0.59 - ETA: 15s - loss: 0.6667 - acc: 0.59 - ETA: 15s - loss: 0.6667 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 
0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.59 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5903 - ETA: 9s - loss: 0.6667 - acc: 0.590 - 
ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 
- acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6669 - acc: 0.590 - ETA: 4s - loss: 0.6669 - acc: 0.590 - ETA: 4s - loss: 0.6669 - acc: 0.589 - ETA: 4s - loss: 0.6669 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6669 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s 
- loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.59002497802/2497802 [==============================] - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6668 - acc: 0.590 - ETA: 0s - loss: 0.6669 - acc: 0.590 - ETA: 0s - loss: 0.6669 - acc: 0.590 - ETA: 0s - loss: 0.6669 - acc: 0.590 - ETA: 0s - loss: 0.6669 - acc: 0.590 - 18s 7us/step - loss: 0.6669 - acc: 0.5900 - val_loss: 0.6546 - val_acc: 0.6231 Epoch 12/100 2150000/2497802 [========================>.....] 
- ETA: 40s - loss: 0.6640 - acc: 0.59 - ETA: 28s - loss: 0.6669 - acc: 0.59 - ETA: 23s - loss: 0.6669 - acc: 0.58 - ETA: 21s - loss: 0.6668 - acc: 0.58 - ETA: 20s - loss: 0.6663 - acc: 0.59 - ETA: 19s - loss: 0.6670 - acc: 0.58 - ETA: 19s - loss: 0.6668 - acc: 0.58 - ETA: 18s - loss: 0.6666 - acc: 0.58 - ETA: 18s - loss: 0.6666 - acc: 0.58 - ETA: 18s - loss: 0.6667 - acc: 0.58 - ETA: 18s - loss: 0.6667 - acc: 0.58 - ETA: 18s - loss: 0.6669 - acc: 0.58 - ETA: 17s - loss: 0.6670 - acc: 0.58 - ETA: 17s - loss: 0.6670 - acc: 0.58 - ETA: 17s - loss: 0.6671 - acc: 0.58 - ETA: 17s - loss: 0.6673 - acc: 0.58 - ETA: 17s - loss: 0.6672 - acc: 0.58 - ETA: 16s - loss: 0.6673 - acc: 0.58 - ETA: 16s - loss: 0.6672 - acc: 0.58 - ETA: 16s - loss: 0.6670 - acc: 0.58 - ETA: 16s - loss: 0.6669 - acc: 0.58 - ETA: 16s - loss: 0.6669 - acc: 0.58 - ETA: 16s - loss: 0.6672 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 16s - loss: 0.6674 - acc: 0.58 - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6673 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 
0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6669 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 9s - loss: 0.6671 - acc: 0.5894 - 
ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6670 - acc: 0.589 - ETA: 9s - loss: 0.6670 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6672 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6672 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 7s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6670 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 
- acc: 0.589 - ETA: 6s - loss: 0.6670 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 6s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6670 - acc: 0.589 - ETA: 5s - loss: 0.6671 - acc: 0.589 - ETA: 5s - loss: 0.6670 - acc: 0.589 - ETA: 5s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6671 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 3s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s 
- loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.58952497802/2497802 [==============================] - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6670 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - 18s 7us/step - loss: 0.6669 - acc: 0.5897 - val_loss: 0.6538 - val_acc: 0.6234 Epoch 00012: early stopping Wall time: 3min 33s
<keras.callbacks.History at 0x26879bcb3c8>
%%time
# Fit the model
#classifier.fit(X_train, y_train, epochs=200, batch_size=500,validation_data=(X_val, y_val),callbacks=callbacks1)
Wall time: 0 ns
%%time
# Fit the model
# NOTE(review): second disabled experiment — batch_size=5000, epochs=1000, with
# callbacks1. Kept commented out as part of the batch-size sweep record.
#classifier.fit(X_train, y_train, epochs=1000, batch_size=5000,validation_data=(X_val, y_val),callbacks=callbacks1)
Wall time: 0 ns
%%time
# Fit the model
# Active training run: up to 1000 epochs with a large batch size (50000),
# validating against the held-out (X_val, y_val) split each epoch.
# callbacks2 is defined earlier in the notebook — presumably it contains an
# EarlyStopping callback, since a prior run's log shows
# "Epoch 00012: early stopping"; confirm against its definition.
# The logged output below shows ~2.5M training / ~624K validation samples,
# val_acc plateauing around 0.623-0.624 within the first epochs.
classifier.fit(X_train, y_train, epochs=1000, batch_size=50000,validation_data=(X_val, y_val),callbacks=callbacks2)
Train on 2497802 samples, validate on 624451 samples Epoch 1/1000 2497802/2497802 [==============================] - ETA: 17s - loss: 0.6679 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.59 - ETA: 14s - loss: 0.6672 - acc: 0.59 - ETA: 13s - loss: 0.6672 - acc: 0.59 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6674 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 10s - loss: 0.6672 - acc: 0.58 - ETA: 9s - loss: 0.6671 - acc: 0.5899 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 9s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6671 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 7s - loss: 0.6670 - acc: 0.589 - ETA: 7s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 4s - loss: 0.6670 - acc: 0.589 - ETA: 4s - loss: 0.6669 - acc: 0.589 - ETA: 4s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6668 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 0.6668 - acc: 0.589 - 16s 6us/step - 
loss: 0.6668 - acc: 0.5900 - val_loss: 0.6539 - val_acc: 0.6235 Epoch 2/1000 2497802/2497802 [==============================] - ETA: 19s - loss: 0.6661 - acc: 0.59 - ETA: 16s - loss: 0.6662 - acc: 0.59 - ETA: 15s - loss: 0.6661 - acc: 0.59 - ETA: 14s - loss: 0.6657 - acc: 0.59 - ETA: 14s - loss: 0.6659 - acc: 0.59 - ETA: 13s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6661 - acc: 0.59 - ETA: 12s - loss: 0.6661 - acc: 0.59 - ETA: 11s - loss: 0.6660 - acc: 0.59 - ETA: 11s - loss: 0.6661 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5904 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 8s - loss: 0.6669 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 8s - loss: 0.6669 - acc: 0.590 - ETA: 7s - loss: 0.6669 - acc: 0.590 - ETA: 7s - loss: 0.6669 - acc: 0.590 - ETA: 7s - loss: 0.6669 - acc: 0.590 - ETA: 6s - loss: 0.6669 - acc: 0.590 - ETA: 6s - loss: 0.6669 - acc: 0.590 - ETA: 6s - loss: 0.6669 - acc: 0.590 - ETA: 5s - loss: 0.6669 - acc: 0.590 - ETA: 5s - loss: 0.6669 - acc: 0.590 - ETA: 5s - loss: 0.6669 - acc: 0.590 - ETA: 4s - loss: 0.6669 - acc: 0.590 - ETA: 4s - loss: 0.6669 - acc: 0.590 - ETA: 4s - loss: 0.6669 - acc: 0.590 - ETA: 3s - loss: 0.6669 - acc: 0.590 - ETA: 3s - loss: 0.6669 - acc: 0.590 - ETA: 3s - loss: 0.6670 - acc: 0.590 - ETA: 2s - loss: 0.6670 - acc: 0.590 - ETA: 2s - loss: 0.6669 - acc: 0.590 - ETA: 2s - loss: 0.6669 - acc: 0.590 - ETA: 2s - loss: 0.6669 - acc: 0.590 - ETA: 1s - loss: 0.6669 - acc: 0.590 - ETA: 1s - loss: 0.6669 - acc: 0.590 - ETA: 1s - loss: 0.6670 - acc: 0.590 - ETA: 0s - loss: 0.6670 - acc: 0.590 - ETA: 0s - loss: 0.6670 - acc: 0.590 - ETA: 0s - loss: 0.6670 - acc: 0.590 - 16s 
6us/step - loss: 0.6669 - acc: 0.5900 - val_loss: 0.6542 - val_acc: 0.6232 Epoch 3/1000 2497802/2497802 [==============================] - ETA: 19s - loss: 0.6676 - acc: 0.58 - ETA: 15s - loss: 0.6674 - acc: 0.58 - ETA: 14s - loss: 0.6676 - acc: 0.58 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 9s - loss: 0.6670 - acc: 0.5897 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 
- 15s 6us/step - loss: 0.6667 - acc: 0.5900 - val_loss: 0.6541 - val_acc: 0.6235 Epoch 4/1000 2497802/2497802 [==============================] - ETA: 18s - loss: 0.6671 - acc: 0.58 - ETA: 15s - loss: 0.6668 - acc: 0.58 - ETA: 15s - loss: 0.6667 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6667 - acc: 0.58 - ETA: 13s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5903 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 
0.590 - 16s 6us/step - loss: 0.6667 - acc: 0.5903 - val_loss: 0.6539 - val_acc: 0.6233 Epoch 5/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6664 - acc: 0.59 - ETA: 16s - loss: 0.6670 - acc: 0.58 - ETA: 15s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6666 - acc: 0.59 - ETA: 13s - loss: 0.6666 - acc: 0.59 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6663 - acc: 0.59 - ETA: 12s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6665 - acc: 0.5908 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 
- acc: 0.590 - 16s 6us/step - loss: 0.6665 - acc: 0.5906 - val_loss: 0.6539 - val_acc: 0.6236 Epoch 6/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6657 - acc: 0.58 - ETA: 14s - loss: 0.6666 - acc: 0.58 - ETA: 14s - loss: 0.6663 - acc: 0.59 - ETA: 15s - loss: 0.6665 - acc: 0.59 - ETA: 14s - loss: 0.6666 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6667 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 9s - loss: 0.6669 - acc: 0.5901 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6669 - acc: 0.590 - ETA: 6s - loss: 0.6669 - acc: 0.590 - ETA: 5s - loss: 0.6669 - acc: 0.590 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.590 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 4s - loss: 0.6669 - acc: 0.589 - ETA: 4s - loss: 0.6669 - acc: 0.589 - ETA: 4s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 3s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 2s - loss: 0.6670 - acc: 0.589 - ETA: 2s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.589 - ETA: 1s - loss: 0.6669 - acc: 0.590 - ETA: 1s - loss: 0.6669 - acc: 0.590 - ETA: 0s - loss: 0.6669 - acc: 0.590 - ETA: 0s - loss: 0.6669 - acc: 0.589 - ETA: 0s - loss: 
0.6669 - acc: 0.589 - 15s 6us/step - loss: 0.6669 - acc: 0.5899 - val_loss: 0.6541 - val_acc: 0.6236 Epoch 7/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6660 - acc: 0.58 - ETA: 14s - loss: 0.6661 - acc: 0.58 - ETA: 13s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6654 - acc: 0.59 - ETA: 13s - loss: 0.6659 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5908 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 6s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 5s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - 
loss: 0.6667 - acc: 0.590 - 15s 6us/step - loss: 0.6667 - acc: 0.5902 - val_loss: 0.6537 - val_acc: 0.6237 Epoch 8/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6645 - acc: 0.59 - ETA: 14s - loss: 0.6653 - acc: 0.59 - ETA: 14s - loss: 0.6661 - acc: 0.58 - ETA: 15s - loss: 0.6662 - acc: 0.58 - ETA: 14s - loss: 0.6662 - acc: 0.59 - ETA: 14s - loss: 0.6660 - acc: 0.59 - ETA: 13s - loss: 0.6660 - acc: 0.59 - ETA: 13s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 9s - loss: 0.6664 - acc: 0.5906 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 
ETA: 0s - loss: 0.6667 - acc: 0.590 - 16s 6us/step - loss: 0.6666 - acc: 0.5903 - val_loss: 0.6543 - val_acc: 0.6236 Epoch 9/1000 2497802/2497802 [==============================] - ETA: 21s - loss: 0.6661 - acc: 0.58 - ETA: 18s - loss: 0.6671 - acc: 0.58 - ETA: 16s - loss: 0.6666 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6673 - acc: 0.58 - ETA: 11s - loss: 0.6672 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6669 - acc: 0.58 - ETA: 10s - loss: 0.6669 - acc: 0.58 - ETA: 9s - loss: 0.6667 - acc: 0.5895 - ETA: 9s - loss: 0.6667 - acc: 0.589 - ETA: 9s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6666 - acc: 0.589 - ETA: 5s - loss: 0.6666 - acc: 0.589 - ETA: 5s - loss: 0.6666 - acc: 0.589 - ETA: 4s - loss: 0.6666 - acc: 0.589 - ETA: 4s - loss: 0.6666 - acc: 0.589 - ETA: 4s - loss: 0.6666 - acc: 0.589 - ETA: 3s - loss: 0.6666 - acc: 0.589 - ETA: 3s - loss: 0.6665 - acc: 0.589 - ETA: 3s - loss: 0.6665 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 
0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - 15s 6us/step - loss: 0.6666 - acc: 0.5900 - val_loss: 0.6541 - val_acc: 0.6238 Epoch 10/1000 2497802/2497802 [==============================] - ETA: 17s - loss: 0.6674 - acc: 0.58 - ETA: 15s - loss: 0.6676 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6666 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 9s - loss: 0.6667 - acc: 0.5898 - ETA: 9s - loss: 0.6666 - acc: 0.589 - ETA: 9s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 7s - loss: 0.6668 - acc: 0.589 - ETA: 7s - loss: 0.6668 - acc: 0.589 - ETA: 7s - loss: 0.6668 - acc: 0.589 - ETA: 6s - loss: 0.6668 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 
- acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - 15s 6us/step - loss: 0.6667 - acc: 0.5902 - val_loss: 0.6537 - val_acc: 0.6237 Epoch 11/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6669 - acc: 0.59 - ETA: 14s - loss: 0.6669 - acc: 0.59 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.58 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.58 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6665 - acc: 0.5906 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 
0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6665 - acc: 0.5903 - val_loss: 0.6538 - val_acc: 0.6238 Epoch 12/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 14s - loss: 0.6666 - acc: 0.58 - ETA: 13s - loss: 0.6667 - acc: 0.58 - ETA: 13s - loss: 0.6666 - acc: 0.58 - ETA: 13s - loss: 0.6663 - acc: 0.58 - ETA: 12s - loss: 0.6664 - acc: 0.58 - ETA: 12s - loss: 0.6667 - acc: 0.58 - ETA: 12s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.58 - ETA: 11s - loss: 0.6665 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6669 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 9s - loss: 0.6670 - acc: 0.5892 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 8s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6670 - acc: 0.589 - ETA: 7s - loss: 0.6670 - acc: 0.589 - ETA: 7s - loss: 0.6670 - acc: 0.589 - ETA: 7s - loss: 0.6670 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 6s - loss: 0.6670 - acc: 0.589 - ETA: 6s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6669 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.589 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 3s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6668 - acc: 0.589 - ETA: 2s - loss: 0.6668 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.589 - ETA: 0s 
- loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 15s 6us/step - loss: 0.6666 - acc: 0.5900 - val_loss: 0.6539 - val_acc: 0.6238 Epoch 13/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6654 - acc: 0.59 - ETA: 14s - loss: 0.6660 - acc: 0.59 - ETA: 14s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6666 - acc: 0.58 - ETA: 12s - loss: 0.6665 - acc: 0.58 - ETA: 11s - loss: 0.6665 - acc: 0.58 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5901 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 
ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 15s 6us/step - loss: 0.6666 - acc: 0.5903 - val_loss: 0.6542 - val_acc: 0.6239 Epoch 14/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6675 - acc: 0.58 - ETA: 14s - loss: 0.6673 - acc: 0.58 - ETA: 14s - loss: 0.6666 - acc: 0.58 - ETA: 13s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6663 - acc: 0.59 - ETA: 12s - loss: 0.6663 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5909 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6667 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 
0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - 15s 6us/step - loss: 0.6666 - acc: 0.5904 - val_loss: 0.6543 - val_acc: 0.6238 Epoch 15/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6670 - acc: 0.59 - ETA: 14s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.59 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6671 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.58 - ETA: 9s - loss: 0.6668 - acc: 0.5899 - ETA: 9s - loss: 0.6668 - acc: 0.589 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 
- acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 15s 6us/step - loss: 0.6666 - acc: 0.5902 - val_loss: 0.6540 - val_acc: 0.6239 Epoch 16/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6657 - acc: 0.59 - ETA: 14s - loss: 0.6663 - acc: 0.59 - ETA: 14s - loss: 0.6664 - acc: 0.58 - ETA: 13s - loss: 0.6667 - acc: 0.58 - ETA: 13s - loss: 0.6664 - acc: 0.58 - ETA: 12s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6666 - acc: 0.58 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.58 - ETA: 11s - loss: 0.6666 - acc: 0.58 - ETA: 11s - loss: 0.6666 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6666 - acc: 0.58 - ETA: 10s - loss: 0.6666 - acc: 0.58 - ETA: 9s - loss: 0.6667 - acc: 0.5898 - ETA: 9s - loss: 0.6667 - acc: 0.589 - ETA: 9s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 
0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6665 - acc: 0.5906 - val_loss: 0.6535 - val_acc: 0.6238 Epoch 17/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6666 - acc: 0.59 - ETA: 14s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6663 - acc: 0.59 - ETA: 13s - loss: 0.6663 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6658 - acc: 0.59 - ETA: 12s - loss: 0.6657 - acc: 0.59 - ETA: 11s - loss: 0.6658 - acc: 0.59 - ETA: 11s - loss: 0.6660 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6662 - acc: 0.59 - ETA: 9s - loss: 0.6662 - acc: 0.5904 - ETA: 9s - loss: 0.6662 - acc: 0.590 - ETA: 9s - loss: 0.6662 - acc: 0.590 - ETA: 9s - loss: 0.6663 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.589 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 0s 
- loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 14s 6us/step - loss: 0.6666 - acc: 0.5902 - val_loss: 0.6539 - val_acc: 0.6239 Epoch 18/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6674 - acc: 0.59 - ETA: 14s - loss: 0.6665 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6666 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6670 - acc: 0.59 - ETA: 10s - loss: 0.6670 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 9s - loss: 0.6668 - acc: 0.5904 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - 
ETA: 0s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 15s 6us/step - loss: 0.6664 - acc: 0.5907 - val_loss: 0.6539 - val_acc: 0.6242 Epoch 19/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6657 - acc: 0.59 - ETA: 15s - loss: 0.6664 - acc: 0.59 - ETA: 14s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6661 - acc: 0.59 - ETA: 12s - loss: 0.6661 - acc: 0.59 - ETA: 12s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6661 - acc: 0.59 - ETA: 11s - loss: 0.6661 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5907 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6667 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 
0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 15s 6us/step - loss: 0.6666 - acc: 0.5903 - val_loss: 0.6541 - val_acc: 0.6241 Epoch 20/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6660 - acc: 0.59 - ETA: 13s - loss: 0.6661 - acc: 0.59 - ETA: 12s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5911 - ETA: 9s - loss: 0.6663 - acc: 0.591 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.589 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 
- acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6665 - acc: 0.5903 - val_loss: 0.6538 - val_acc: 0.6240 Epoch 21/1000 2497802/2497802 [==============================] - ETA: 19s - loss: 0.6668 - acc: 0.58 - ETA: 16s - loss: 0.6668 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6670 - acc: 0.58 - ETA: 15s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6670 - acc: 0.58 - ETA: 15s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6670 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6664 - acc: 0.5904 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.589 - ETA: 6s - loss: 0.6668 - acc: 0.589 - ETA: 6s - loss: 0.6667 - acc: 0.589 - ETA: 6s - loss: 0.6668 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6668 - acc: 0.589 - ETA: 4s - loss: 0.6667 - acc: 0.589 - ETA: 4s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 2s - loss: 0.6667 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 
0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 16s 6us/step - loss: 0.6666 - acc: 0.5902 - val_loss: 0.6535 - val_acc: 0.6239 Epoch 22/1000 2497802/2497802 [==============================] - ETA: 19s - loss: 0.6686 - acc: 0.58 - ETA: 16s - loss: 0.6673 - acc: 0.59 - ETA: 15s - loss: 0.6667 - acc: 0.59 - ETA: 14s - loss: 0.6666 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6666 - acc: 0.59 - ETA: 13s - loss: 0.6666 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6669 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 9s - loss: 0.6667 - acc: 0.5903 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6668 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s 
- loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 16s 6us/step - loss: 0.6665 - acc: 0.5902 - val_loss: 0.6540 - val_acc: 0.6240 Epoch 23/1000 2497802/2497802 [==============================] - ETA: 20s - loss: 0.6680 - acc: 0.59 - ETA: 16s - loss: 0.6674 - acc: 0.59 - ETA: 14s - loss: 0.6672 - acc: 0.59 - ETA: 13s - loss: 0.6670 - acc: 0.59 - ETA: 13s - loss: 0.6669 - acc: 0.59 - ETA: 13s - loss: 0.6669 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6667 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 9s - loss: 0.6669 - acc: 0.5901 - ETA: 9s - loss: 0.6669 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 9s - loss: 0.6668 - acc: 0.590 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6668 - acc: 0.589 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.589 - ETA: 5s - loss: 0.6666 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.589 - ETA: 5s - loss: 0.6667 - acc: 0.589 - ETA: 4s - loss: 0.6667 - acc: 0.589 - ETA: 4s - loss: 0.6667 - acc: 0.589 - ETA: 4s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 3s - loss: 0.6667 - acc: 0.589 - ETA: 2s - loss: 0.6667 - acc: 0.589 - ETA: 2s - loss: 0.6667 - acc: 0.589 - ETA: 2s - loss: 0.6667 - acc: 0.589 - ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6667 - acc: 0.589 - 
ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - 15s 6us/step - loss: 0.6666 - acc: 0.5899 - val_loss: 0.6541 - val_acc: 0.6241 Epoch 24/1000 2497802/2497802 [==============================] - ETA: 18s - loss: 0.6661 - acc: 0.59 - ETA: 16s - loss: 0.6669 - acc: 0.58 - ETA: 15s - loss: 0.6667 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 14s - loss: 0.6669 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 13s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6666 - acc: 0.59 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5908 - ETA: 9s - loss: 0.6663 - acc: 0.590 - ETA: 9s - loss: 0.6663 - acc: 0.590 - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 
0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6665 - acc: 0.5905 - val_loss: 0.6538 - val_acc: 0.6242 Epoch 25/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6677 - acc: 0.58 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 14s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6670 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6670 - acc: 0.58 - ETA: 11s - loss: 0.6669 - acc: 0.59 - ETA: 11s - loss: 0.6669 - acc: 0.58 - ETA: 11s - loss: 0.6668 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 10s - loss: 0.6668 - acc: 0.58 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 9s - loss: 0.6668 - acc: 0.5896 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 9s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 5s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 
- acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6665 - acc: 0.5907 - val_loss: 0.6539 - val_acc: 0.6242 Epoch 26/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6686 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 12s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6665 - acc: 0.5905 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 
0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 15s 6us/step - loss: 0.6666 - acc: 0.5901 - val_loss: 0.6540 - val_acc: 0.6240 Epoch 27/1000 2497802/2497802 [==============================] - ETA: 18s - loss: 0.6674 - acc: 0.59 - ETA: 16s - loss: 0.6672 - acc: 0.58 - ETA: 15s - loss: 0.6670 - acc: 0.58 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 15s - loss: 0.6668 - acc: 0.59 - ETA: 14s - loss: 0.6669 - acc: 0.59 - ETA: 14s - loss: 0.6671 - acc: 0.59 - ETA: 13s - loss: 0.6671 - acc: 0.59 - ETA: 13s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6668 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 9s - loss: 0.6667 - acc: 0.5903 - ETA: 9s - loss: 0.6667 - acc: 0.590 - ETA: 9s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s 
- loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 17s 7us/step - loss: 0.6666 - acc: 0.5901 - val_loss: 0.6544 - val_acc: 0.6240 Epoch 28/1000 2497802/2497802 [==============================] - ETA: 20s - loss: 0.6648 - acc: 0.59 - ETA: 18s - loss: 0.6662 - acc: 0.59 - ETA: 17s - loss: 0.6665 - acc: 0.59 - ETA: 16s - loss: 0.6659 - acc: 0.59 - ETA: 15s - loss: 0.6660 - acc: 0.59 - ETA: 15s - loss: 0.6661 - acc: 0.59 - ETA: 15s - loss: 0.6663 - acc: 0.59 - ETA: 14s - loss: 0.6664 - acc: 0.59 - ETA: 14s - loss: 0.6663 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.58 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6662 - acc: 0.5902 - ETA: 9s - loss: 0.6662 - acc: 0.590 - ETA: 8s - loss: 0.6662 - acc: 0.590 - ETA: 8s - loss: 0.6662 - acc: 0.590 - ETA: 8s - loss: 0.6662 - acc: 0.590 - ETA: 7s - loss: 0.6662 - acc: 0.590 - ETA: 7s - loss: 0.6662 - acc: 0.590 - ETA: 7s - loss: 0.6662 - acc: 0.590 - ETA: 6s - loss: 0.6663 - acc: 0.590 - ETA: 6s - loss: 0.6662 - acc: 0.590 - ETA: 6s - loss: 0.6662 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6663 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - 
ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 17s 7us/step - loss: 0.6665 - acc: 0.5903 - val_loss: 0.6545 - val_acc: 0.6240 Epoch 29/1000 2497802/2497802 [==============================] - ETA: 20s - loss: 0.6668 - acc: 0.59 - ETA: 17s - loss: 0.6668 - acc: 0.59 - ETA: 16s - loss: 0.6663 - acc: 0.59 - ETA: 16s - loss: 0.6662 - acc: 0.59 - ETA: 15s - loss: 0.6663 - acc: 0.59 - ETA: 14s - loss: 0.6663 - acc: 0.59 - ETA: 14s - loss: 0.6664 - acc: 0.59 - ETA: 14s - loss: 0.6664 - acc: 0.59 - ETA: 13s - loss: 0.6663 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6667 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 9s - loss: 0.6665 - acc: 0.5905 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 9s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 8s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 
0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 16s 6us/step - loss: 0.6665 - acc: 0.5906 - val_loss: 0.6543 - val_acc: 0.6242 Epoch 30/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6671 - acc: 0.58 - ETA: 14s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6668 - acc: 0.58 - ETA: 13s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6666 - acc: 0.58 - ETA: 12s - loss: 0.6663 - acc: 0.59 - ETA: 12s - loss: 0.6661 - acc: 0.59 - ETA: 11s - loss: 0.6661 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.58 - ETA: 10s - loss: 0.6665 - acc: 0.58 - ETA: 10s - loss: 0.6665 - acc: 0.58 - ETA: 10s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6664 - acc: 0.5901 - ETA: 9s - loss: 0.6663 - acc: 0.590 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6663 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 2s - loss: 0.6667 - acc: 0.589 - ETA: 2s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6666 - acc: 0.589 - ETA: 1s - loss: 0.6666 
- acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - ETA: 0s - loss: 0.6666 - acc: 0.589 - 15s 6us/step - loss: 0.6666 - acc: 0.5899 - val_loss: 0.6539 - val_acc: 0.6242 Epoch 31/1000 2497802/2497802 [==============================] - ETA: 17s - loss: 0.6654 - acc: 0.59 - ETA: 15s - loss: 0.6663 - acc: 0.59 - ETA: 14s - loss: 0.6674 - acc: 0.58 - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 13s - loss: 0.6672 - acc: 0.58 - ETA: 13s - loss: 0.6671 - acc: 0.58 - ETA: 12s - loss: 0.6669 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 12s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 11s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6671 - acc: 0.58 - ETA: 10s - loss: 0.6670 - acc: 0.58 - ETA: 9s - loss: 0.6669 - acc: 0.5894 - ETA: 9s - loss: 0.6669 - acc: 0.589 - ETA: 9s - loss: 0.6668 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.589 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 
0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 16s 6us/step - loss: 0.6665 - acc: 0.5904 - val_loss: 0.6543 - val_acc: 0.6244 Epoch 00031: early stopping Wall time: 7min 54s
<keras.callbacks.History at 0x2689253c2e8>
%%time
# Fit the model
classifier.fit(X_train, y_train, epochs=1000, batch_size=200000,validation_data=(X_val, y_val),callbacks=callbacks2)
Train on 2497802 samples, validate on 624451 samples Epoch 1/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6667 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6664 - acc: 0.5908 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6663 - acc: 0.590 - ETA: 0s - loss: 0.6663 - acc: 0.590 - 17s 7us/step - loss: 0.6664 - acc: 0.5905 - val_loss: 0.6541 - val_acc: 0.6243 Epoch 2/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6663 - acc: 0.59 - ETA: 14s - loss: 0.6667 - acc: 0.59 - ETA: 12s - loss: 0.6668 - acc: 0.58 - ETA: 11s - loss: 0.6666 - acc: 0.58 - ETA: 9s - loss: 0.6665 - acc: 0.5899 - ETA: 8s - loss: 0.6665 - acc: 0.589 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 18s 7us/step - loss: 0.6664 - acc: 0.5902 - val_loss: 0.6541 - val_acc: 0.6244 Epoch 3/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6660 - acc: 0.59 - ETA: 13s - loss: 0.6663 - acc: 0.59 - ETA: 12s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.59 - ETA: 9s - loss: 0.6666 - acc: 0.5902 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6666 - acc: 0.590 - 16s 6us/step - loss: 0.6666 - acc: 0.5902 - val_loss: 0.6542 - val_acc: 0.6244 Epoch 4/1000 2497802/2497802 [==============================] - ETA: 14s - loss: 0.6656 - acc: 0.59 - ETA: 12s - loss: 0.6658 - acc: 0.59 - ETA: 
11s - loss: 0.6661 - acc: 0.59 - ETA: 10s - loss: 0.6660 - acc: 0.59 - ETA: 8s - loss: 0.6661 - acc: 0.5910 - ETA: 7s - loss: 0.6662 - acc: 0.590 - ETA: 6s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6663 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.590 - ETA: 1s - loss: 0.6662 - acc: 0.590 - ETA: 0s - loss: 0.6662 - acc: 0.590 - 15s 6us/step - loss: 0.6662 - acc: 0.5907 - val_loss: 0.6540 - val_acc: 0.6244 Epoch 5/1000 2497802/2497802 [==============================] - ETA: 13s - loss: 0.6665 - acc: 0.59 - ETA: 12s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6661 - acc: 0.59 - ETA: 8s - loss: 0.6665 - acc: 0.5907 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 4s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - 16s 6us/step - loss: 0.6666 - acc: 0.5904 - val_loss: 0.6544 - val_acc: 0.6243 Epoch 6/1000 2497802/2497802 [==============================] - ETA: 14s - loss: 0.6657 - acc: 0.59 - ETA: 12s - loss: 0.6665 - acc: 0.58 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6667 - acc: 0.58 - ETA: 8s - loss: 0.6667 - acc: 0.5899 - ETA: 7s - loss: 0.6667 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 4s - loss: 0.6663 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 15s 6us/step - loss: 0.6664 - acc: 0.5902 - val_loss: 0.6540 - val_acc: 0.6244 Epoch 7/1000 2497802/2497802 [==============================] - ETA: 13s - loss: 0.6660 - acc: 0.59 - ETA: 12s - loss: 0.6660 - acc: 0.59 - ETA: 11s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5907 - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 5s - 
loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.590 - ETA: 1s - loss: 0.6662 - acc: 0.591 - ETA: 0s - loss: 0.6662 - acc: 0.591 - 16s 6us/step - loss: 0.6662 - acc: 0.5911 - val_loss: 0.6540 - val_acc: 0.6244 Epoch 8/1000 2497802/2497802 [==============================] - ETA: 13s - loss: 0.6662 - acc: 0.58 - ETA: 12s - loss: 0.6666 - acc: 0.58 - ETA: 11s - loss: 0.6665 - acc: 0.58 - ETA: 9s - loss: 0.6668 - acc: 0.5895 - ETA: 8s - loss: 0.6667 - acc: 0.589 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6666 - acc: 0.590 - ETA: 5s - loss: 0.6666 - acc: 0.590 - ETA: 3s - loss: 0.6667 - acc: 0.590 - ETA: 2s - loss: 0.6666 - acc: 0.590 - ETA: 1s - loss: 0.6666 - acc: 0.590 - ETA: 0s - loss: 0.6667 - acc: 0.590 - 15s 6us/step - loss: 0.6667 - acc: 0.5901 - val_loss: 0.6544 - val_acc: 0.6243 Epoch 9/1000 2497802/2497802 [==============================] - ETA: 14s - loss: 0.6662 - acc: 0.59 - ETA: 12s - loss: 0.6664 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5907 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 6s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6662 - acc: 0.590 - ETA: 2s - loss: 0.6662 - acc: 0.590 - ETA: 1s - loss: 0.6661 - acc: 0.591 - ETA: 0s - loss: 0.6661 - acc: 0.591 - 15s 6us/step - loss: 0.6661 - acc: 0.5910 - val_loss: 0.6539 - val_acc: 0.6243 Epoch 10/1000 2497802/2497802 [==============================] - ETA: 13s - loss: 0.6661 - acc: 0.58 - ETA: 12s - loss: 0.6660 - acc: 0.59 - ETA: 11s - loss: 0.6661 - acc: 0.59 - ETA: 10s - loss: 0.6659 - acc: 0.59 - ETA: 9s - loss: 0.6659 - acc: 0.5909 - ETA: 8s - loss: 0.6661 - acc: 0.590 - ETA: 6s - loss: 0.6662 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6662 - acc: 0.590 - ETA: 3s - loss: 0.6662 - acc: 0.590 - ETA: 1s - loss: 0.6662 - acc: 0.590 - ETA: 0s - loss: 0.6662 - acc: 0.590 - 16s 6us/step - 
loss: 0.6663 - acc: 0.5906 - val_loss: 0.6541 - val_acc: 0.6243 Epoch 11/1000 2497802/2497802 [==============================] - ETA: 14s - loss: 0.6658 - acc: 0.59 - ETA: 13s - loss: 0.6662 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5907 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6665 - acc: 0.590 - ETA: 1s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6665 - acc: 0.590 - 17s 7us/step - loss: 0.6665 - acc: 0.5902 - val_loss: 0.6543 - val_acc: 0.6243 Epoch 12/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6661 - acc: 0.59 - ETA: 14s - loss: 0.6661 - acc: 0.59 - ETA: 13s - loss: 0.6663 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6662 - acc: 0.59 - ETA: 9s - loss: 0.6662 - acc: 0.5914 - ETA: 7s - loss: 0.6662 - acc: 0.591 - ETA: 6s - loss: 0.6662 - acc: 0.591 - ETA: 4s - loss: 0.6663 - acc: 0.591 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 18s 7us/step - loss: 0.6664 - acc: 0.5907 - val_loss: 0.6542 - val_acc: 0.6242 Epoch 13/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6664 - acc: 0.59 - ETA: 13s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.58 - ETA: 10s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5906 - ETA: 7s - loss: 0.6663 - acc: 0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 16s 7us/step - loss: 0.6664 - acc: 0.5904 - val_loss: 0.6541 - val_acc: 0.6244 Epoch 14/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6665 - acc: 0.59 - ETA: 14s - loss: 0.6666 - 
acc: 0.58 - ETA: 12s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5906 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6664 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6663 - acc: 0.590 - ETA: 1s - loss: 0.6663 - acc: 0.590 - ETA: 0s - loss: 0.6663 - acc: 0.590 - 17s 7us/step - loss: 0.6663 - acc: 0.5905 - val_loss: 0.6540 - val_acc: 0.6242 Epoch 15/1000 2497802/2497802 [==============================] - ETA: 15s - loss: 0.6656 - acc: 0.59 - ETA: 14s - loss: 0.6660 - acc: 0.59 - ETA: 13s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6665 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5906 - ETA: 7s - loss: 0.6663 - acc: 0.590 - ETA: 6s - loss: 0.6663 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 18s 7us/step - loss: 0.6664 - acc: 0.5906 - val_loss: 0.6543 - val_acc: 0.6242 Epoch 16/1000 2497802/2497802 [==============================] - ETA: 13s - loss: 0.6664 - acc: 0.59 - ETA: 12s - loss: 0.6665 - acc: 0.59 - ETA: 11s - loss: 0.6663 - acc: 0.59 - ETA: 10s - loss: 0.6663 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5905 - ETA: 7s - loss: 0.6663 - acc: 0.590 - ETA: 6s - loss: 0.6663 - acc: 0.590 - ETA: 5s - loss: 0.6662 - acc: 0.590 - ETA: 4s - loss: 0.6662 - acc: 0.590 - ETA: 3s - loss: 0.6663 - acc: 0.590 - ETA: 1s - loss: 0.6663 - acc: 0.590 - ETA: 0s - loss: 0.6663 - acc: 0.590 - 17s 7us/step - loss: 0.6663 - acc: 0.5907 - val_loss: 0.6541 - val_acc: 0.6243 Epoch 17/1000 2497802/2497802 [==============================] - ETA: 16s - loss: 0.6664 - acc: 0.59 - ETA: 14s - loss: 0.6660 - acc: 0.59 - ETA: 13s - loss: 0.6658 - acc: 0.59 - ETA: 11s - loss: 0.6662 - acc: 0.59 - ETA: 10s - loss: 0.6662 - acc: 0.59 - ETA: 9s - loss: 0.6663 - acc: 0.5902 - ETA: 7s - loss: 0.6664 - acc: 
0.590 - ETA: 6s - loss: 0.6664 - acc: 0.590 - ETA: 4s - loss: 0.6665 - acc: 0.590 - ETA: 3s - loss: 0.6666 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 18s 7us/step - loss: 0.6664 - acc: 0.5903 - val_loss: 0.6542 - val_acc: 0.6244 Epoch 18/1000 2497802/2497802 [==============================] - ETA: 14s - loss: 0.6662 - acc: 0.59 - ETA: 13s - loss: 0.6666 - acc: 0.59 - ETA: 11s - loss: 0.6664 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.59 - ETA: 9s - loss: 0.6665 - acc: 0.5901 - ETA: 8s - loss: 0.6664 - acc: 0.590 - ETA: 7s - loss: 0.6664 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6664 - acc: 0.590 - ETA: 0s - loss: 0.6664 - acc: 0.590 - 17s 7us/step - loss: 0.6664 - acc: 0.5906 - val_loss: 0.6541 - val_acc: 0.6244 Epoch 19/1000 2497802/2497802 [==============================] - ETA: 13s - loss: 0.6673 - acc: 0.58 - ETA: 12s - loss: 0.6668 - acc: 0.59 - ETA: 11s - loss: 0.6667 - acc: 0.59 - ETA: 10s - loss: 0.6666 - acc: 0.58 - ETA: 9s - loss: 0.6666 - acc: 0.5900 - ETA: 7s - loss: 0.6666 - acc: 0.590 - ETA: 6s - loss: 0.6665 - acc: 0.590 - ETA: 5s - loss: 0.6665 - acc: 0.590 - ETA: 4s - loss: 0.6664 - acc: 0.590 - ETA: 3s - loss: 0.6664 - acc: 0.590 - ETA: 1s - loss: 0.6663 - acc: 0.590 - ETA: 0s - loss: 0.6663 - acc: 0.590 - 16s 6us/step - loss: 0.6664 - acc: 0.5905 - val_loss: 0.6540 - val_acc: 0.6244 Epoch 00019: early stopping Wall time: 5min 12s
<keras.callbacks.History at 0x26879bcb2b0>
%%time
# Fit the model
classifier.fit(X_train, y_train, epochs=1000, batch_size=1000000,validation_data=(X_val, y_val),callbacks=callbacks2)
Train on 2497802 samples, validate on 624451 samples Epoch 1/1000 2497802/2497802 [==============================] - ETA: 9s - loss: 0.6662 - acc: 0.590 - ETA: 2s - loss: 0.6663 - acc: 0.591 - 16s 6us/step - loss: 0.6663 - acc: 0.5910 - val_loss: 0.6540 - val_acc: 0.6244 Epoch 2/1000 2497802/2497802 [==============================] - ETA: 8s - loss: 0.6661 - acc: 0.591 - ETA: 2s - loss: 0.6662 - acc: 0.590 - 15s 6us/step - loss: 0.6663 - acc: 0.5909 - val_loss: 0.6541 - val_acc: 0.6243 Epoch 3/1000 2497802/2497802 [==============================] - ETA: 9s - loss: 0.6665 - acc: 0.589 - ETA: 2s - loss: 0.6664 - acc: 0.590 - 16s 6us/step - loss: 0.6664 - acc: 0.5904 - val_loss: 0.6541 - val_acc: 0.6242 Epoch 4/1000 2497802/2497802 [==============================] - ETA: 9s - loss: 0.6662 - acc: 0.591 - ETA: 3s - loss: 0.6662 - acc: 0.590 - 16s 7us/step - loss: 0.6663 - acc: 0.5906 - val_loss: 0.6542 - val_acc: 0.6242 Epoch 5/1000 2497802/2497802 [==============================] - ETA: 9s - loss: 0.6664 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - 16s 6us/step - loss: 0.6664 - acc: 0.5903 - val_loss: 0.6542 - val_acc: 0.6242 Epoch 6/1000 2497802/2497802 [==============================] - ETA: 9s - loss: 0.6661 - acc: 0.590 - ETA: 3s - loss: 0.6663 - acc: 0.590 - 16s 6us/step - loss: 0.6664 - acc: 0.5906 - val_loss: 0.6542 - val_acc: 0.6243 Epoch 7/1000 2497802/2497802 [==============================] - ETA: 8s - loss: 0.6668 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6664 - acc: 0.5906 - val_loss: 0.6542 - val_acc: 0.6244 Epoch 8/1000 2497802/2497802 [==============================] - ETA: 9s - loss: 0.6663 - acc: 0.591 - ETA: 2s - loss: 0.6663 - acc: 0.590 - 15s 6us/step - loss: 0.6663 - acc: 0.5906 - val_loss: 0.6541 - val_acc: 0.6244 Epoch 9/1000 2497802/2497802 [==============================] - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 2s - loss: 0.6664 - acc: 0.590 - 15s 6us/step - loss: 0.6665 - acc: 0.5905 - val_loss: 
0.6541 - val_acc: 0.6244 Epoch 10/1000 2497802/2497802 [==============================] - ETA: 8s - loss: 0.6663 - acc: 0.590 - ETA: 2s - loss: 0.6665 - acc: 0.590 - 15s 6us/step - loss: 0.6664 - acc: 0.5901 - val_loss: 0.6541 - val_acc: 0.6244 Epoch 11/1000 2497802/2497802 [==============================] - ETA: 8s - loss: 0.6659 - acc: 0.591 - ETA: 2s - loss: 0.6661 - acc: 0.590 - 16s 6us/step - loss: 0.6661 - acc: 0.5908 - val_loss: 0.6541 - val_acc: 0.6244 Epoch 00011: early stopping Wall time: 2min 51s
<keras.callbacks.History at 0x267859aaa90>
# Evaluate the fitted network on the held-out validation split and report
# the second compiled metric (accuracy) as a percentage.
scores = classifier.evaluate(X_val, y_val)
metric_name = classifier.metrics_names[1]
print("\n%s: %.2f%%" % (metric_name, scores[1] * 100))
624451/624451 [==============================] - ETA: 2: - ETA: 9s - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - ETA: - 7s 11us/step acc: 62.44%
%%time
# Score the held-out test split; output is thresholded at 0.5 below,
# so these are presumably sigmoid probabilities of shape (n, 1) — TODO confirm
predictions=classifier.predict(X_test)
Wall time: 11.3 s
%%time
# Dead cell: predict_proba call against an earlier sklearn `model`, kept commented out.
#predictions_scores=model.predict_proba(X_test)
Wall time: 0 ns
# Threshold the network's scores at 0.5 to get hard 0/1 labels.
# np.ravel flattens Keras' (n, 1) output so the comparison runs as one
# vectorised pass instead of a Python-level loop over rows (PERF401).
preds = (np.ravel(predictions) >= 0.5).astype(int)
confusion_matrix(y_test, preds)
array([[424152, 244775],
[259915, 409267]], dtype=int64)
# Overall fraction of correct predictions on the test split.
accuracy_score(y_test,preds)
0.6228334164107707
# Recall: fraction of actual positives that were flagged as positive.
recall_score(y_test,preds)
0.6115929597628149
# Precision: fraction of predicted positives that are actually positive.
precision_score(y_test,preds)
0.6257503340764049
# AUC is computed on the raw scores (`predictions`), not the thresholded labels.
roc_auc_score(y_test,predictions)
0.6730229533169875
# Score the competition test set on the same feature subset used for training,
# then write the submission (solution presumably already holds MachineIdentifier
# from an earlier cell — verify).
predictions=classifier.predict(df_test[important_columns])
solution['HasDetections']=predictions
solution.to_csv('submit_31st.csv',index=False)
# Ensemble estimators for the next experiments.
from sklearn.ensemble import AdaBoostClassifier,VotingClassifier
C:\Users\gandh\Anaconda3\lib\site-packages\sklearn\ensemble\weight_boosting.py:29: DeprecationWarning: numpy.core.umath_tests is an internal NumPy module and should not be imported. It will be removed in a future NumPy release. from numpy.core.umath_tests import inner1d
# LightGBM member of the ensemble, with every hyper-parameter spelled out
# (90% row/column subsampling, depth 15, 200 leaves, L1 regularisation 0.5).
clf1=LGBMClassifier(boosting_type='gbdt', class_weight=None, colsample_bytree=0.9,
importance_type='split', learning_rate=0.15, max_depth=15,
min_child_samples=20, min_child_weight=0.001, min_split_gain=0.0,
n_estimators=1000, n_jobs=-1, num_leaves=200, objective=None,
random_state=42, reg_alpha=0.5, reg_lambda=0.0, silent=False,
subsample=0.9, subsample_for_bin=200000, subsample_freq=5)
# AdaBoost member with a conservative learning rate.
clf2=AdaBoostClassifier(learning_rate=0.1,n_estimators=100,random_state=42)
# Soft-voting ensemble: LightGBM's probabilities get twice AdaBoost's weight.
vc=VotingClassifier(estimators=[('lgb',clf1),('adb',clf2)],voting='soft',weights=[2,1])
# Force a garbage-collection pass before the memory-heavy fits.
import gc
gc.collect()
0
%%time
# Voting-classifier fit is disabled; only clf2 is fitted in the next cell.
#vc.fit(X_train[important_columns_01],y_train)
Wall time: 0 ns
%%time
# Fit the AdaBoost model alone (~32 min per the recorded wall time).
clf2.fit(X_train,y_train)
Wall time: 31min 47s
AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None,
learning_rate=0.1, n_estimators=100, random_state=42)
# Hard labels and class probabilities from the fitted AdaBoost model.
predictions=clf2.predict(X_test)
prediction_scores=clf2.predict_proba(X_test)
# predict_proba returns an (n, 2) ndarray; column 1 is the positive-class
# probability. Slice it directly instead of appending row by row (PERF401).
scores = prediction_scores[:, 1]
# Report the standard classification metrics for the AdaBoost model.
cm = confusion_matrix(y_test, predictions)
print("Confusion Matrix for the Model : \n", cm)
print("Accuracy of the model : ", accuracy_score(y_test, predictions))
print("Precision of the Model : ", precision_score(y_test, predictions))
print("Recall score of the Model : ", recall_score(y_test, predictions))
print("Area under ROC curve for the Model : ", roc_auc_score(y_test, scores))
Confusion Matrix for the Model : [[368338 300589] [212696 456486]] Accuracy of the model : 0.6164101728633468 Precision of the Model : 0.6029600766106397 Recall score of the Model : 0.6821552283235353 Area under ROC curve for the Model : 0.6695695166372357
# Cross-validation
# BUG FIX: KFold was listed twice in the original import; duplicate removed.
from sklearn.model_selection import KFold, StratifiedKFold #for K-fold cross validation
from sklearn.model_selection import cross_val_score #score evaluation
from sklearn.model_selection import cross_val_predict #prediction
from sklearn.model_selection import cross_validate
import lightgbm as lgb
#kf = StratifiedKFold(random_state=42,shuffle=False,n_splits=5)
# sklearn-style LGBM estimator; note the CV loop below rebinds `clf` via
# lgb.train with the `param` dict, so this object itself is never fitted.
clf=LGBMClassifier(boosting_type='gbdt', class_weight=None, colsample_bytree=0.9,
importance_type='split', learning_rate=0.15, max_depth=-1,
min_child_samples=20, min_child_weight=0.001, min_split_gain=0.0,
n_estimators=1000, n_jobs=-1, objective=None,
random_state=42, reg_alpha=0.7, reg_lambda=0.7, silent=False,
subsample=0.9,subsample_freq=5)
# Native-API LightGBM parameters for the CV loop. `sub_feature`/`sub_row`
# are LightGBM aliases for feature_fraction / bagging_fraction
# (0.9 = 90% column / row subsampling) — confirm against the lgbm docs.
param = {'objective':'binary',
'boosting': 'gbdt',
'learning_rate': 0.05,
'max_depth': -1,
'num_leaves': 100,
'sub_feature': 0.9,
'sub_row':0.9,
'lambda_l1': 0.6,
'lambda_l2': 0.6,
"random_state": 133,
"verbosity": -1}
# Use every column of X as a model feature in the CV loop.
features=X.columns.tolist()
import gc
Stratified 5-fold cross-validation, adapted from https://www.kaggle.com/roydatascience/light-gbm-on-stratified-k-folds-malwares
%%time
max_iter = 5
#categorical_columns = [c for c in categorical_columns if c not in ['MachineIdentifier']]
#features = [c for c in train.columns if c not in ['MachineIdentifier']]
gc.collect()
folds = StratifiedKFold(n_splits=5, shuffle=True, random_state=15)
oof = np.zeros(len(X))
predictions = np.zeros(len(df_test))
feature_importance_df = pd.DataFrame()
score = [0 for _ in range(folds.n_splits)]
for fold_, (trn_idx, val_idx) in enumerate(folds.split(X,y.values)):
print("fold {}".format(fold_))
trn_data = lgb.Dataset(X.iloc[trn_idx][features], label=y.iloc[trn_idx])
val_data = lgb.Dataset(X.iloc[val_idx][features], label=y.iloc[val_idx])
num_round = 1000
clf = lgb.train(param, trn_data, num_round, valid_sets = [trn_data, val_data], verbose_eval=-1, early_stopping_rounds = 200)
oof[val_idx] = clf.predict(X.iloc[val_idx][features], num_iteration=clf.best_iteration)
fold_importance_df = pd.DataFrame()
fold_importance_df["feature"] = features
fold_importance_df["importance"] = clf.feature_importance(importance_type='gain')
fold_importance_df["fold"] = fold_ + 1
feature_importance_df = pd.concat([feature_importance_df, fold_importance_df], axis=0)
# we perform predictions by chunks
initial_idx = 0
chunk_size = 1000000
current_pred = np.zeros(len(df_test))
while initial_idx < df_test.shape[0]:
final_idx = min(initial_idx + chunk_size, df_test.shape[0])
idx = range(initial_idx, final_idx)
current_pred[idx] = clf.predict(df_test.iloc[idx][features], num_iteration=clf.best_iteration)
initial_idx = final_idx
predictions += current_pred / min(folds.n_splits, max_iter)
score[fold_] = roc_auc_score(y.iloc[val_idx], oof[val_idx])
if fold_ == max_iter - 1: break
if (folds.n_splits == max_iter):
print("CV score: {:<8.5f}".format(roc_auc_score(y, oof)))
else:
print("CV score: {:<8.5f}".format(sum(score) / max_iter))
fold 0 Training until validation scores don't improve for 200 rounds. Did not meet early stopping. Best iteration is: [1000] training's binary_logloss: 0.596156 valid_1's binary_logloss: 0.604861 fold 1 Training until validation scores don't improve for 200 rounds. Did not meet early stopping. Best iteration is: [1000] training's binary_logloss: 0.595986 valid_1's binary_logloss: 0.60519 fold 2 Training until validation scores don't improve for 200 rounds. Did not meet early stopping. Best iteration is: [1000] training's binary_logloss: 0.596275 valid_1's binary_logloss: 0.605196 fold 3 Training until validation scores don't improve for 200 rounds. Did not meet early stopping. Best iteration is: [1000] training's binary_logloss: 0.596253 valid_1's binary_logloss: 0.604526 fold 4 Training until validation scores don't improve for 200 rounds. Did not meet early stopping. Best iteration is: [1000] training's binary_logloss: 0.59619 valid_1's binary_logloss: 0.604496 CV score: 0.72831 Wall time: 1h 35min 17s
# Average each feature's gain-importance across folds, keep the top 1000,
# and save a horizontal bar chart of the per-fold values.
mean_importance = (feature_importance_df[["feature", "importance"]]
                   .groupby("feature")
                   .mean()
                   .sort_values(by="importance", ascending=False))
cols = mean_importance[:1000].index
best_features = feature_importance_df.loc[feature_importance_df.feature.isin(cols)]
plt.figure(figsize=(14, 25))
sns.barplot(x="importance", y="feature",
            data=best_features.sort_values(by="importance", ascending=False))
plt.title('LightGBM Features (avg over folds)')
plt.tight_layout()
plt.savefig('lgbm_importances2.png')
#solution=pd.DataFrame()
#solution['MachineIdentifier']=df_test['MachineIdentifier']
# Write the fold-averaged LightGBM predictions as the submission.
# NOTE(review): same filename as the Keras submission earlier in the
# notebook — that file gets overwritten; confirm this is intended.
solution['HasDetections']=predictions
solution.to_csv('submit_31st.csv',index=False)